From 1ed8d49b305a4ed03578eb251b18e5bd193ec04c Mon Sep 17 00:00:00 2001
From: Techatrix <19954306+Techatrix@users.noreply.github.com>
Date: Sat, 21 Jan 2023 01:33:27 +0100
Subject: [PATCH 01/15] fix builtin completions if `label_details_support` is false (#924)

---
 src/Server.zig | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/src/Server.zig b/src/Server.zig
index e3bef95..d5ece4e 100644
--- a/src/Server.zig
+++ b/src/Server.zig
@@ -1326,16 +1326,15 @@ fn completeBuiltin(server: *Server) error{OutOfMemory}!?[]types.CompletionItem {
         });
     }
 
-    var completions = try allocator.alloc(types.CompletionItem, builtin_completions.items.len);
+    var completions = try builtin_completions.clone(allocator);
 
     if (server.client_capabilities.label_details_support) {
-        for (builtin_completions.items) |item, i| {
-            completions[i] = item;
-            try formatDetailledLabel(&completions[i], allocator);
+        for (completions.items) |*item| {
+            try formatDetailledLabel(item, allocator);
         }
     }
 
-    return completions;
+    return completions.items;
 }
 
 fn completeGlobal(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}![]types.CompletionItem {

From ea05916e6904979cac6a88232210aad33f008ffc Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Wed, 11 Jan 2023 17:30:47 +0000
Subject: [PATCH 02/15] Goto definition works when the cursor is at the start of the identifier.

Before, the code lexed only a prefix of the line up to the cursor
position. Now, we lex the entire line, and also look at the token just
after the cursor.

This subtly changes the semantics of `getPositionContext`: it now
becomes oblivious of the _exact_ position of the cursor and returns the
whole token at the cursor's position. I believe this is the
semantically right approach -- _most_ call sites should not worry at
all about such details. Something like completion _might_ want to know
more, but it's better to make that the call site's problem.

It might be the case that some existing code relies on the past
behavior. It's hard to tell though -- we don't have a lot of tests for
_features_, and changes to unit-tests don't explain whether the changes
are meaningful for user-observable behavior or not. In general, for an
LSP-shaped thing, I feel that the bulk of testing should be focused on
end-to-end behaviors....
---
 src/analysis.zig                   | 18 +++++++++++++++---
 tests/lsp_features/definition.zig  |  9 ++-------
 tests/utility/position_context.zig | 30 +++++++++++++++---------------
 3 files changed, 32 insertions(+), 25 deletions(-)

diff --git a/src/analysis.zig b/src/analysis.zig
index cf92e92..7fdd9e5 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -1553,9 +1553,15 @@ fn tokenLocAppend(prev: offsets.Loc, token: std.zig.Token) offsets.Loc {
     };
 }
 
+/// Given a byte index in a document (typically cursor offset), classify what kind of entity is at that index.
+///
+/// Classification is based on the lexical structure -- we fetch the line containin index, tokenize it,
+/// and look at the sequence of tokens just before the cursor. Due to the nice way zig is designed (only line
+/// comments, etc) lexing just a single line is always correct.
pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_index: usize) !PositionContext { - const line_loc = offsets.lineLocUntilIndex(text, doc_index); + const line_loc = offsets.lineLocAtIndex(text, doc_index); const line = offsets.locToSlice(text, line_loc); + const prev_char = if (doc_index > 0) text[doc_index - 1] else 0; const is_comment = std.mem.startsWith(u8, std.mem.trimLeft(u8, line, " \t"), "//"); if (is_comment) return .comment; @@ -1576,10 +1582,16 @@ pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_in while (true) { const tok = tokenizer.next(); // Early exits. + if (tok.loc.start > doc_index) break; + if (tok.loc.start == doc_index) { + // Tie-breaking, the curosr is exactly between two tokens, and + // `tok` is the latter of the two. + if (tok.tag != .identifier) break; + } switch (tok.tag) { .invalid => { // Single '@' do not return a builtin token so we check this on our own. - if (line[line.len - 1] == '@') { + if (prev_char == '@') { return PositionContext{ .builtin = .{ .start = line_loc.end - 1, @@ -1685,7 +1697,7 @@ pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_in .label => |filled| { // We need to check this because the state could be a filled // label if only a space follows it - if (!filled or line[line.len - 1] != ' ') { + if (!filled or prev_char != ' ') { return state.ctx; } }, diff --git a/tests/lsp_features/definition.zig b/tests/lsp_features/definition.zig index caed193..ea4e2b0 100644 --- a/tests/lsp_features/definition.zig +++ b/tests/lsp_features/definition.zig @@ -28,15 +28,10 @@ test "definition - cursor is at the end of an identifier" { } test "definition - cursor is at the start of an identifier" { - testDefinition( + try testDefinition( \\fn main() void { <>foo(); } \\fn foo() void {} - ) catch |err| switch (err) { - error.UnresolvedDefinition => { - // TODO: #891 - }, - else => return err, - }; + ); } fn testDefinition(source: []const u8) !void { diff --git a/tests/utility/position_context.zig b/tests/utility/position_context.zig index 2735061..4fd778a 100644 --- a/tests/utility/position_context.zig +++ b/tests/utility/position_context.zig @@ -12,7 +12,7 @@ test "position context - var access" { \\const this_var = identifier; , .var_access, - "id", + "identifier", ); try testContext( \\const this_var = identifier; @@ -40,37 +40,37 @@ test "position context - field access" { \\if (foo.field == foo) { , .field_access, - "foo.", + "foo.field", ); try testContext( \\if (foo.member.field == foo) { , .field_access, - "foo.member.", + "foo.member.field", ); try testContext( \\if (foo.*.?.field == foo) { , .field_access, - "foo.*.?.", + "foo.*.?.field", ); try testContext( \\if (foo[0].field == foo) { , .field_access, - "foo[0].", + "foo[0].field", ); try testContext( \\if (foo.@"field" == foo) { , .field_access, - "foo.", + "foo.@\"field\"", ); try testContext( \\const arr = std.ArrayList(SomeStruct(a, b, c, d)).init(allocator); , .field_access, - "std.ArrayList(SomeStruct(a, b, c, d)).in", + "std.ArrayList(SomeStruct(a, b, c, d)).init", ); try testContext( \\fn foo() !Foo.b { @@ -122,13 +122,13 @@ test "position context - import/embedfile string literal" { \\const std = @import("st"); , .import_string_literal, - "\"st", // maybe report just "st" + "\"st\"", // maybe report just "st" ); try testContext( \\const std = @embedFile("file."); , .embedfile_string_literal, - "\"file.", // maybe report just "file." + "\"file.\"", // maybe report just "file." 
); } @@ -137,13 +137,13 @@ test "position context - string literal" { \\var foo = "hello world!"; , .string_literal, - "\"he", // maybe report just "he" + "\"hello world!\"", // maybe report just "hello world!" ); try testContext( \\var foo = \\hello; , .string_literal, - "\\\\hello", // maybe report just "hello" + "\\\\hello;", // maybe report just "hello;" ); } @@ -237,7 +237,7 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma const final_line = try std.mem.concat(allocator, u8, &.{ line[0..cursor_idx], line[cursor_idx + "".len ..] }); defer allocator.free(final_line); - const ctx = try analysis.getPositionContext(allocator, line, cursor_idx); + const ctx = try analysis.getPositionContext(allocator, final_line, cursor_idx); if (std.meta.activeTag(ctx) != tag) { std.debug.print("Expected tag `{s}`, got `{s}`\n", .{ @tagName(tag), @tagName(std.meta.activeTag(ctx)) }); @@ -253,7 +253,7 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma const expected_range = maybe_range orelse { std.debug.print("Expected null range, got `{s}`\n", .{ - line[actual_loc.start..actual_loc.end], + final_line[actual_loc.start..actual_loc.end], }); return error.DifferentRange; }; @@ -263,8 +263,8 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma if (expected_range_start != actual_loc.start or expected_range_end != actual_loc.end) { std.debug.print("Expected range `{s}` ({}..{}), got `{s}` ({}..{})\n", .{ - line[expected_range_start..expected_range_end], expected_range_start, expected_range_end, - line[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end, + final_line[expected_range_start..expected_range_end], expected_range_start, expected_range_end, + final_line[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end, }); return error.DifferentRange; } From 5afaf2ae3af0cdf6ed8bfc7c335d0d9641ee445e Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Sat, 21 Jan 2023 19:32:12 +0100 Subject: [PATCH 03/15] optimize folding range (#926) --- src/Server.zig | 62 +++++++++++++++++++++++--------------------------- 1 file changed, 29 insertions(+), 33 deletions(-) diff --git a/src/Server.zig b/src/Server.zig index d5ece4e..3456170 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -2508,6 +2508,9 @@ fn inlayHintHandler(server: *Server, request: types.InlayHintParams) Error!?[]ty } fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]types.CodeAction { + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; var builder = code_actions.Builder{ @@ -2536,6 +2539,9 @@ fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[] } fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error!?[]types.FoldingRange { + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); + const Token = std.zig.Token; const Node = Ast.Node; const allocator = server.arena.allocator(); @@ -2543,37 +2549,26 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error const helper = struct { const Inclusivity = enum { inclusive, exclusive }; - /// Returns true if added. 
- fn maybeAddTokRange( - p_ranges: *std.ArrayList(types.FoldingRange), - tree: Ast, - start: Ast.TokenIndex, - end: Ast.TokenIndex, - end_reach: Inclusivity, - encoding: offsets.Encoding, - ) std.mem.Allocator.Error!bool { - const can_add = start < end and !tree.tokensOnSameLine(start, end); - if (can_add) { - try addTokRange(p_ranges, tree, start, end, end_reach, encoding); - } - return can_add; - } + fn addTokRange( p_ranges: *std.ArrayList(types.FoldingRange), tree: Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, end_reach: Inclusivity, - encoding: offsets.Encoding, ) std.mem.Allocator.Error!void { - std.debug.assert(!std.debug.runtime_safety or !tree.tokensOnSameLine(start, end)); + if (tree.tokensOnSameLine(start, end)) return; + std.debug.assert(start <= end); - const start_line = offsets.tokenToPosition(tree, start, encoding).line; - const end_line = offsets.tokenToPosition(tree, end, encoding).line; + const start_index = offsets.tokenToIndex(tree, start); + const end_index = offsets.tokenToIndex(tree, end); + + const start_line = std.mem.count(u8, tree.source[0..start_index], "\n"); + const end_line = start_line + std.mem.count(u8, tree.source[start_index..end_index], "\n"); try p_ranges.append(.{ - .startLine = start_line, - .endLine = end_line - @boolToInt(end_reach == .exclusive), + .startLine = @intCast(u32, start_line), + .endLine = @intCast(u32, end_line) - @boolToInt(end_reach == .exclusive), }); } }; @@ -2604,6 +2599,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error const node = @intCast(Node.Index, i); switch (node_tag) { + .root => continue, // only fold the expression pertaining to the if statement, and the else statement, each respectively. // TODO: Should folding multiline condition expressions also be supported? Ditto for the other control flow structures. 
.@"if", .if_simple => { @@ -2611,14 +2607,14 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error const start_tok_1 = ast.lastToken(handle.tree, if_full.ast.cond_expr); const end_tok_1 = ast.lastToken(handle.tree, if_full.ast.then_expr); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding); + try helper.addTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive); if (if_full.ast.else_expr == 0) continue; const start_tok_2 = if_full.else_token; const end_tok_2 = ast.lastToken(handle.tree, if_full.ast.else_expr); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding); + try helper.addTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive); }, // same as if/else @@ -2632,13 +2628,13 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error const start_tok_1 = ast.lastToken(handle.tree, loop_full.ast.cond_expr); const end_tok_1 = ast.lastToken(handle.tree, loop_full.ast.then_expr); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding); + try helper.addTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive); if (loop_full.ast.else_expr == 0) continue; const start_tok_2 = loop_full.else_token; const end_tok_2 = ast.lastToken(handle.tree, loop_full.ast.else_expr); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding); + try helper.addTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive); }, .global_var_decl, @@ -2666,7 +2662,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error start_doc_tok -= 1; } - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive, server.offset_encoding); + try helper.addTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive); } // Function prototype folding regions @@ -2678,10 +2674,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error const list_end_tok: Ast.TokenIndex = ast.lastToken(handle.tree, fn_proto.ast.proto_node); if (handle.tree.tokensOnSameLine(list_start_tok, list_end_tok)) break :decl_node_blk; - try ranges.ensureUnusedCapacity(1 + fn_proto.ast.params.len); // best guess, doesn't include anytype params - helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive, server.offset_encoding) catch |err| switch (err) { - error.OutOfMemory => unreachable, - }; + try helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive); var it = fn_proto.iterate(&handle.tree); while (ast.nextFnParam(&it)) |param| { @@ -2691,7 +2684,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error while (token_tags[doc_end_tok + 1] == .doc_comment) doc_end_tok += 1; - _ = try helper.maybeAddTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive, server.offset_encoding); + try helper.addTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive); } }, @@ -2703,7 +2696,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error => { const start_tok = handle.tree.firstToken(node); const end_tok = ast.lastToken(handle.tree, node); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive, server.offset_encoding); + try helper.addTokRange(&ranges, handle.tree, start_tok, 
end_tok, .inclusive); }, // most other trivial cases can go through here. @@ -2750,7 +2743,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error const start_tok = handle.tree.firstToken(node); const end_tok = ast.lastToken(handle.tree, node); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive, server.offset_encoding); + try helper.addTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive); }, } } @@ -2799,6 +2792,9 @@ pub const SelectionRange = struct { }; fn selectionRangeHandler(server: *Server, request: types.SelectionRangeParams) Error!?[]*SelectionRange { + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); + const allocator = server.arena.allocator(); const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; From d670a4bbf221107e14f892716bd12c5f0a363427 Mon Sep 17 00:00:00 2001 From: Auguste Rame <19855629+SuperAuguste@users.noreply.github.com> Date: Sat, 21 Jan 2023 18:27:16 -0500 Subject: [PATCH 04/15] Add fuzzing workflow (#927) --- .github/workflows/fuzz.yml | 92 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 .github/workflows/fuzz.yml diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml new file mode 100644 index 0000000..d625098 --- /dev/null +++ b/.github/workflows/fuzz.yml @@ -0,0 +1,92 @@ +name: Sus Fuzzing + +on: + push: + branches: + - master + schedule: + - cron: "0 0 * * *" + workflow_dispatch: + inputs: + fuzzing_duration: + type: string + description: How long should fuzzing last? (sleep time argument) + default: 15m + +jobs: + fuzz: + runs-on: ubuntu-latest + + steps: + - name: Set Swap Space + uses: pierotofy/set-swap-space@master + with: + swap-size-gb: 10 + + - name: Grab zig + uses: goto-bus-stop/setup-zig@v1 + with: + version: master + + - run: zig version + - run: zig env + + - name: Checkout zig + uses: actions/checkout@v3 + with: + path: zig + repository: "ziglang/zig" + fetch-depth: 0 + + - name: Checkout zls + uses: actions/checkout@v3 + with: + path: zls + fetch-depth: 0 + submodules: true + + - name: Build zls + run: | + cd $GITHUB_WORKSPACE/zls + pwd + zig build + + - name: Checkout sus + uses: actions/checkout@v3 + with: + path: sus + repository: "zigtools/sus" + fetch-depth: 0 + submodules: recursive + + - name: Build sus + run: | + cd $GITHUB_WORKSPACE/sus + pwd + zig build -Drelease-fast + + - name: Run sus + continue-on-error: true + run: | + cd $GITHUB_WORKSPACE/sus + FUZZING_DURATION=${{ github.event.inputs.fuzzing_duration }} + { sleep ${FUZZING_DURATION:-1h}; pkill -9 sus; } & + ./zig-out/bin/sus $GITHUB_WORKSPACE/zls/zig-out/bin/zls markov $GITHUB_WORKSPACE/zig/lib/std + + - name: Upload saved logs + uses: actions/upload-artifact@v3 + with: + name: saved-logs + path: sus/saved_logs/ + + - uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }} + secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }} + space_name: fuzzing-output + space_region: nyc3 + source: sus/saved_logs/ + out_dir: ${{ github.repository_owner }}-${{ github.event.repository.name }}/${{ github.ref_name }}/${{ github.sha }} + + + \ No newline at end of file From 211a852efc2700e7e0cdfcbcc2440867d9f71436 Mon Sep 17 00:00:00 2001 From: Auguste Rame <19855629+SuperAuguste@users.noreply.github.com> Date: Sat, 21 Jan 2023 21:49:05 -0500 Subject: [PATCH 05/15] Fix fuzzing paths (#928) --- .github/workflows/fuzz.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index d625098..7356fd8 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -86,7 +86,7 @@ jobs: space_name: fuzzing-output space_region: nyc3 source: sus/saved_logs/ - out_dir: ${{ github.repository_owner }}-${{ github.event.repository.name }}/${{ github.ref_name }}/${{ github.sha }} + out_dir: ${{ github.repository_owner }}/${{ github.event.repository.name }}/${{ github.ref_name }}/${{ github.sha }} \ No newline at end of file From 7b35c6e5ebdde6dfa25295ac4480872feb8f4273 Mon Sep 17 00:00:00 2001 From: Auguste Rame <19855629+SuperAuguste@users.noreply.github.com> Date: Sun, 22 Jan 2023 03:30:10 -0500 Subject: [PATCH 06/15] Last fuzzing PR for a while hopefully (#929) --- .github/workflows/fuzz.yml | 16 +++++++++++++++- .github/workflows/main.yml | 16 ---------------- 2 files changed, 15 insertions(+), 17 deletions(-) diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index 7356fd8..b1312a0 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -1,7 +1,11 @@ name: Sus Fuzzing on: + pull_request: + types: [labeled, synchronize] push: + paths: + - "**.zig" branches: - master schedule: @@ -15,6 +19,7 @@ on: jobs: fuzz: + if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'pr:fuzz') runs-on: ubuntu-latest steps: @@ -23,6 +28,16 @@ jobs: with: swap-size-gb: 10 + - name: Default fuzzing duration + if: github.event_name != 'pull_request' + run: | + FUZZING_DURATION=${{ github.event.inputs.fuzzing_duration }} + + - name: PR fuzzing duration + if: github.event_name == 'pull_request' + run: | + FUZZING_DURATION=15m + - name: Grab zig uses: goto-bus-stop/setup-zig@v1 with: @@ -69,7 +84,6 @@ jobs: continue-on-error: true run: | cd $GITHUB_WORKSPACE/sus - FUZZING_DURATION=${{ github.event.inputs.fuzzing_duration }} { sleep ${FUZZING_DURATION:-1h}; pkill -9 sus; } & ./zig-out/bin/sus $GITHUB_WORKSPACE/zls/zig-out/bin/zls markov $GITHUB_WORKSPACE/zig/lib/std diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2df721f..5ad4166 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -122,19 +122,3 @@ jobs: REMOTE_HOST: ${{ secrets.WEBSITE_DEPLOY_HOST }} REMOTE_USER: ${{ secrets.WEBSITE_DEPLOY_USER }} TARGET: ${{ secrets.WEBSITE_DEPLOY_FOLDER }} - - - name: Instruct fuzzing server to pull latest zls - if: ${{ matrix.os == 'ubuntu-latest' && github.ref == 'refs/heads/master' && github.repository_owner == 'zigtools' }} - uses: appleboy/ssh-action@v0.1.7 - with: - host: fuzzing.zigtools.org - username: ${{ secrets.FUZZING_SSH_USERNAME }} - key: ${{ secrets.FUZZING_SSH_PRIVKEY }} - script: | - systemctl stop fuzzing - systemctl stop fuzzing-web - cd /root/sus - ./script/setup.sh - systemctl start fuzzing - sleep 5s - systemctl start fuzzing-web From 12e996ad966fdd04f931655ee32fb19f473cb2e2 Mon Sep 17 00:00:00 2001 From: Auguste Rame <19855629+SuperAuguste@users.noreply.github.com> Date: Sun, 22 Jan 2023 05:40:03 -0500 Subject: [PATCH 07/15] Fix env transfer (#932) --- .github/workflows/fuzz.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index b1312a0..aef4c5c 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -31,12 +31,12 @@ jobs: - name: Default fuzzing duration if: github.event_name != 'pull_request' run: | - FUZZING_DURATION=${{ github.event.inputs.fuzzing_duration }} + echo 
"FUZZING_DURATION=${{ github.event.inputs.fuzzing_duration }}" >> $GITHUB_ENV - name: PR fuzzing duration if: github.event_name == 'pull_request' run: | - FUZZING_DURATION=15m + echo "FUZZING_DURATION=15m" >> $GITHUB_ENV - name: Grab zig uses: goto-bus-stop/setup-zig@v1 @@ -84,6 +84,7 @@ jobs: continue-on-error: true run: | cd $GITHUB_WORKSPACE/sus + FUZZING_DURATION=${{ env.FUZZING_DURATION }} { sleep ${FUZZING_DURATION:-1h}; pkill -9 sus; } & ./zig-out/bin/sus $GITHUB_WORKSPACE/zls/zig-out/bin/zls markov $GITHUB_WORKSPACE/zig/lib/std From e2307d7dbf42291e4614d0e46f817f746dafcda4 Mon Sep 17 00:00:00 2001 From: Auguste Rame <19855629+SuperAuguste@users.noreply.github.com> Date: Sun, 22 Jan 2023 05:46:35 -0500 Subject: [PATCH 08/15] Fix fuzzing ref (#933) --- .github/workflows/fuzz.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index aef4c5c..cd532a0 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -101,7 +101,7 @@ jobs: space_name: fuzzing-output space_region: nyc3 source: sus/saved_logs/ - out_dir: ${{ github.repository_owner }}/${{ github.event.repository.name }}/${{ github.ref_name }}/${{ github.sha }} + out_dir: ${{ github.repository_owner }}/${{ github.event.repository.name }}/${{ github.head_ref || github.ref_name }}/${{ github.sha }} \ No newline at end of file From 3a86687ae7208dd5e03bc1bad2a832238cd2200b Mon Sep 17 00:00:00 2001 From: Auguste Rame <19855629+SuperAuguste@users.noreply.github.com> Date: Sun, 22 Jan 2023 14:54:38 -0500 Subject: [PATCH 09/15] Fix secrets access in foreign PRs (#934) --- .github/workflows/fuzz.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index cd532a0..dea1026 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -1,7 +1,13 @@ name: Sus Fuzzing on: - pull_request: + # pull_request_target can be dangerous but necessary here to access secrets. + # I'm pretty comfortable using it because: + # - We limit all permissions (including GITHUB_TOKEN) to read-only + # - We limit runs to labelled PRs only which prevents random exploitation + # - We don't expose secrets in environment variables which makes exploitation much more difficult + # - The secrets that we reference aren't all that important anyways (they can only access our DigitalOcean Space) + pull_request_target: types: [labeled, synchronize] push: paths: @@ -17,6 +23,8 @@ on: description: How long should fuzzing last? 
(sleep time argument) default: 15m +permissions: read-all + jobs: fuzz: if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'pr:fuzz') From 8e98bd439bd3b35c85728422cbcf27cfb796f8eb Mon Sep 17 00:00:00 2001 From: Auguste Rame <19855629+SuperAuguste@users.noreply.github.com> Date: Sun, 22 Jan 2023 15:27:17 -0500 Subject: [PATCH 10/15] pull_request -> pull_request_target all over (#935) --- .github/workflows/fuzz.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index dea1026..be32b94 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -27,7 +27,7 @@ permissions: read-all jobs: fuzz: - if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'pr:fuzz') + if: github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'pr:fuzz') runs-on: ubuntu-latest steps: @@ -37,12 +37,12 @@ jobs: swap-size-gb: 10 - name: Default fuzzing duration - if: github.event_name != 'pull_request' + if: github.event_name != 'pull_request_target' run: | echo "FUZZING_DURATION=${{ github.event.inputs.fuzzing_duration }}" >> $GITHUB_ENV - name: PR fuzzing duration - if: github.event_name == 'pull_request' + if: github.event_name == 'pull_request_target' run: | echo "FUZZING_DURATION=15m" >> $GITHUB_ENV From 903f85ab946ca0479704e8d35e8b6219dfafe0d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lan=20Cr=C3=ADstoffer?= Date: Sun, 22 Jan 2023 21:47:53 +0100 Subject: [PATCH 11/15] Fix lsp weird behaviour on block cursors (#891) (#905) * Fix lsp weird behaviour on block cursors (#891) Adds lookahead option to getPositionContext. --- src/Server.zig | 16 +- src/analysis.zig | 27 ++- tests/utility/position_context.zig | 338 ++++++++++++++++++++++++++--- 3 files changed, 336 insertions(+), 45 deletions(-) diff --git a/src/Server.zig b/src/Server.zig index 3456170..bde5f03 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -837,12 +837,12 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) [] if (pos_index + 1 >= handle.text.len) return ""; var start_idx = pos_index; - while (start_idx > 0 and isSymbolChar(handle.text[start_idx - 1])) { + while (start_idx > 0 and analysis.isSymbolChar(handle.text[start_idx - 1])) { start_idx -= 1; } var end_idx = pos_index; - while (end_idx < handle.text.len and isSymbolChar(handle.text[end_idx])) { + while (end_idx < handle.text.len and analysis.isSymbolChar(handle.text[end_idx])) { end_idx += 1; } @@ -850,10 +850,6 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) [] return handle.text[start_idx..end_idx]; } -fn isSymbolChar(char: u8) bool { - return std.ascii.isAlphanumeric(char) or char == '_'; -} - fn gotoDefinitionSymbol( server: *Server, decl_handle: analysis.DeclWithHandle, @@ -2124,7 +2120,7 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty } const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding); - const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index); + const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, false); const maybe_completions = switch (pos_context) { .builtin => try server.completeBuiltin(), @@ -2205,7 +2201,7 @@ fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, resol if 
(request.position.character == 0) return null; const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding); - const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index); + const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true); return switch (pos_context) { .var_access => try server.gotoDefinitionGlobal(source_index, handle, resolve_alias), @@ -2245,7 +2241,7 @@ fn hoverHandler(server: *Server, request: types.HoverParams) Error!?types.Hover if (request.position.character == 0) return null; const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding); - const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index); + const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true); const response = switch (pos_context) { .builtin => try server.hoverDefinitionBuiltin(source_index, handle), @@ -2391,7 +2387,7 @@ fn generalReferencesHandler(server: *Server, request: GeneralReferencesRequest) if (request.position().character <= 0) return null; const source_index = offsets.positionToIndex(handle.text, request.position(), server.offset_encoding); - const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index); + const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true); const decl = switch (pos_context) { .var_access => try server.getSymbolGlobal(source_index, handle), diff --git a/src/analysis.zig b/src/analysis.zig index 7fdd9e5..cf97fe0 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1553,15 +1553,32 @@ fn tokenLocAppend(prev: offsets.Loc, token: std.zig.Token) offsets.Loc { }; } +pub fn isSymbolChar(char: u8) bool { + return std.ascii.isAlphanumeric(char) or char == '_'; +} + /// Given a byte index in a document (typically cursor offset), classify what kind of entity is at that index. /// /// Classification is based on the lexical structure -- we fetch the line containin index, tokenize it, /// and look at the sequence of tokens just before the cursor. Due to the nice way zig is designed (only line /// comments, etc) lexing just a single line is always correct. -pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_index: usize) !PositionContext { - const line_loc = offsets.lineLocAtIndex(text, doc_index); +pub fn getPositionContext( + allocator: std.mem.Allocator, + text: []const u8, + doc_index: usize, + /// Should we look to the end of the current context? 
Yes for goto def, no for completions + lookahead: bool, +) !PositionContext { + var new_index = doc_index; + if (lookahead and new_index < text.len and isSymbolChar(text[new_index])) { + new_index += 1; + } else if (lookahead and new_index + 1 < text.len and text[new_index] == '@') { + new_index += 2; + } + + const line_loc = if (!lookahead) offsets.lineLocAtIndex(text, new_index) else offsets.lineLocUntilIndex(text, new_index); const line = offsets.locToSlice(text, line_loc); - const prev_char = if (doc_index > 0) text[doc_index - 1] else 0; + const prev_char = if (new_index > 0) text[new_index - 1] else 0; const is_comment = std.mem.startsWith(u8, std.mem.trimLeft(u8, line, " \t"), "//"); if (is_comment) return .comment; @@ -1582,8 +1599,8 @@ pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_in while (true) { const tok = tokenizer.next(); // Early exits. - if (tok.loc.start > doc_index) break; - if (tok.loc.start == doc_index) { + if (tok.loc.start > new_index) break; + if (tok.loc.start == new_index) { // Tie-breaking, the curosr is exactly between two tokens, and // `tok` is the latter of the two. if (tok.tag != .identifier) break; diff --git a/tests/utility/position_context.zig b/tests/utility/position_context.zig index 4fd778a..01f13d6 100644 --- a/tests/utility/position_context.zig +++ b/tests/utility/position_context.zig @@ -9,16 +9,47 @@ const allocator = std.testing.allocator; test "position context - var access" { try testContext( - \\const this_var = identifier; + \\const a_var = identifier; + , + .empty, + null, + ); + try testContext( + \\const a_var = identifier; + , + .var_access, + "i", + ); + try testContext( + \\const a_var = identifier; + , + .var_access, + "ident", + ); + try testContext( + \\const a_var = identifier; , .var_access, "identifier", ); try testContext( - \\const this_var = identifier; + \\const a_var = identifier; + , + .empty, + null, + ); + + try testContext( + \\ fn foo() !Str { , .var_access, - "identifier", + "S", + ); + try testContext( + \\ fn foo() !Str { + , + .var_access, + "Str", ); try testContext( \\ fn foo() !Str { @@ -26,74 +57,259 @@ test "position context - var access" { .var_access, "Str", ); - // TODO fix failing test! 
+ try testContext( + \\ fn foo() !Str { + , + .var_access, + "Str", + ); + + // TODO fix failing tests + // try testContext( + // \\ fn foo() Err!void { + // , + // .var_access, + // "E", + // ); + // try testContext( + // \\ fn foo() Err!void { + // , + // .var_access, + // "Err", + // ); // try testContext( // \\ fn foo() Err!void { // , // .var_access, // "Err", // ); + // try testContext( + // \\ fn foo() Err!void { + // , + // .var_access, + // "v", + // ); + + try testContext( + \\if (bar.field == foo) { + , + .var_access, + "b", + ); + try testContext( + \\if (bar.field == foo) { + , + .var_access, + "bar", + ); + try testContext( + \\if (bar.field == foo) { + , + .var_access, + "bar", + ); + + try testContext( + \\if (bar[0].field == foo) { + , + .var_access, + "bar", + ); } test "position context - field access" { try testContext( - \\if (foo.field == foo) { + \\if (bar.field == foo) { , .field_access, - "foo.field", + "bar.f", ); try testContext( - \\if (foo.member.field == foo) { + \\if (bar.field == foo) { , .field_access, - "foo.member.field", + "bar.fiel", ); try testContext( - \\if (foo.*.?.field == foo) { + \\if (bar.field == foo) { , .field_access, - "foo.*.?.field", + "bar.field", + ); + + try testContext( + \\if (bar.member.field == foo) { + , + .field_access, + "bar.member", ); try testContext( - \\if (foo[0].field == foo) { + \\if (bar.member.field == foo) { , .field_access, - "foo[0].field", + "bar.member.f", ); try testContext( - \\if (foo.@"field" == foo) { + \\if (bar.member.field == foo) { , .field_access, - "foo.@\"field\"", + "bar.member.fiel", + ); + try testContext( + \\if (bar.member.field == foo) { + , + .field_access, + "bar.member.field", + ); + + try testContext( + \\if (bar.*.?.field == foo) { + , + .field_access, + "bar.*.?", + ); + try testContext( + \\if (bar.*.?.field == foo) { + , + .field_access, + "bar.*.?.f", + ); + + try testContext( + \\if (bar[0].field == foo) { + , + .field_access, + "bar[0].f", + ); + + try testContext( + \\if (bar.@"field" == foo) { + , + .field_access, + "bar.@\"", + ); + try testContext( + \\if (bar.@"field" == foo) { + , + .field_access, + "bar.@\"fiel", + ); + try testContext( + \\if (bar.@"field" == foo) { + , + .field_access, + "bar.@\"field\"", + ); + + try testContext( + \\const arr = std.ArrayList(SomeStruct(a, b, c, d)).init(allocator); + , + .field_access, + "std.ArrayList(SomeStruct(a, b, c, d)).i", ); try testContext( \\const arr = std.ArrayList(SomeStruct(a, b, c, d)).init(allocator); , .field_access, - "std.ArrayList(SomeStruct(a, b, c, d)).init", + "std.ArrayList(SomeStruct(a, b, c, d)).ini", ); try testContext( - \\fn foo() !Foo.b { + \\const arr = std.ArrayList(SomeStruct(a, b, c, d)).init(allocator); + , + .field_access, + "std.ArrayList(SomeStruct(a, b, c, d)).init", + ); + + try testContext( + \\fn foo() !Foo.bar { , .field_access, "Foo.b", ); - // TODO fix failing test! 
+ try testContext( + \\fn foo() !Foo.bar { + , + .field_access, + "Foo.bar", + ); + try testContext( + \\fn foo() !Foo.bar { + , + .field_access, + "Foo.bar", + ); + + // TODO fix failing tests // try testContext( - // \\fn foo() Foo.b!void { + // \\fn foo() Foo.bar!void { // , // .field_access, // "Foo.b", // ); + // try testContext( + // \\fn foo() Foo.bar!void { + // , + // .field_access, + // "Foo.bar", + // ); + // try testContext( + // \\fn foo() Foo.bar!void { + // , + // .field_access, + // "Foo.bar", + // ); } test "position context - builtin" { + try testContext( + \\var foo = @ + , + .empty, + null, + ); + try testContext( + \\var foo = @intC(u32, 5); + , + .builtin, + "@i", + ); + try testContext( + \\var foo = @intC(u32, 5); + , + .builtin, + "@i", + ); + try testContext( + \\var foo = @intC(u32, 5); + , + .builtin, + "@intC", + ); try testContext( \\var foo = @intC(u32, 5); , .builtin, "@intC", ); + + try testContext( + \\fn foo() void { @setRuntime(false); }; + , + .builtin, + "@s", + ); + try testContext( + \\fn foo() void { @setRuntime(false); }; + , + .builtin, + "@s", + ); + try testContext( + \\fn foo() void { @setRuntime(false); }; + , + .builtin, + "@setR", + ); try testContext( \\fn foo() void { @setRuntime(false); }; , @@ -118,17 +334,29 @@ test "position context - comment" { } test "position context - import/embedfile string literal" { + try testContext( + \\const std = @import("st"); + , + .import_string_literal, + "\"st", // maybe report just "st" + ); try testContext( \\const std = @import("st"); , .import_string_literal, - "\"st\"", // maybe report just "st" + "\"st", // maybe report just "st" ); try testContext( \\const std = @embedFile("file."); , .embedfile_string_literal, - "\"file.\"", // maybe report just "file." + "\"file.", // maybe report just "file." + ); + try testContext( + \\const std = @embedFile("file."); + , + .embedfile_string_literal, + "\"file", // maybe report just "file." ); } @@ -137,29 +365,49 @@ test "position context - string literal" { \\var foo = "hello world!"; , .string_literal, - "\"hello world!\"", // maybe report just "hello world!" + "\"hel", // maybe report just "he" ); try testContext( - \\var foo = \\hello; + \\var foo = \\hello; , .string_literal, - "\\\\hello;", // maybe report just "hello;" + "\\\\hello", // maybe report just "hello;" ); } test "position context - global error set" { + // TODO why is this a .var_access instead of a .global_error_set? 
+ // try testContext( + // \\fn foo() error!void { + // , + // .global_error_set, + // null, + // ); + try testContext( + \\fn foo() error!void { + , + .global_error_set, + null, + ); try testContext( \\fn foo() error!void { , .global_error_set, null, ); + try testContext( + \\fn foo() error.!void { + , + .global_error_set, + null, + ); try testContext( \\fn foo() error.!void { , .global_error_set, null, ); + // TODO this should probably also be .global_error_set // try testContext( // \\fn foo() error{}!void { @@ -176,12 +424,30 @@ test "position context - global error set" { } test "position context - enum literal" { + try testContext( + \\var foo = .tag; + , + .enum_literal, + null, + ); + try testContext( + \\var foo = .tag; + , + .enum_literal, + null, + ); try testContext( \\var foo = .tag; , .enum_literal, null, ); + try testContext( + \\var foo = .; + , + .empty, + null, + ); try testContext( \\var foo = .; , @@ -191,6 +457,24 @@ test "position context - enum literal" { } test "position context - label" { + try testContext( + \\var foo = blk: { break :blk null }; + , + .pre_label, + null, + ); + try testContext( + \\var foo = blk: { break :blk null }; + , + .label, + null, + ); + try testContext( + \\var foo = blk: { break :blk null }; + , + .label, + null, + ); try testContext( \\var foo = blk: { break :blk null }; , @@ -206,12 +490,6 @@ test "position context - empty" { .empty, null, ); - try testContext( - \\const foo = struct {}; - , - .empty, - null, - ); try testContext( \\try foo(arg, slice[]); , @@ -237,7 +515,7 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma const final_line = try std.mem.concat(allocator, u8, &.{ line[0..cursor_idx], line[cursor_idx + "".len ..] }); defer allocator.free(final_line); - const ctx = try analysis.getPositionContext(allocator, final_line, cursor_idx); + const ctx = try analysis.getPositionContext(allocator, final_line, cursor_idx, true); if (std.meta.activeTag(ctx) != tag) { std.debug.print("Expected tag `{s}`, got `{s}`\n", .{ @tagName(tag), @tagName(std.meta.activeTag(ctx)) }); From 0e53ac1328e3c3b08a2c2d1e9b4ecb1f14f5e8fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lan=20Cr=C3=ADstoffer?= Date: Sun, 22 Jan 2023 23:52:04 +0100 Subject: [PATCH 12/15] Partially fixes #907 (#908) --- src/Server.zig | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/src/Server.zig b/src/Server.zig index bde5f03..2fe67dd 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -2144,6 +2144,29 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty const completions = maybe_completions orelse return null; + // The cursor is in the middle of a word or before a @, so we can replace + // the remaining identifier with the completion instead of just inserting. + // TODO Identify function call/struct init and replace the whole thing. 
+ const lookahead_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true); + if (server.client_capabilities.supports_apply_edits and pos_context.loc() != null and lookahead_context.loc() != null and pos_context.loc().?.end != lookahead_context.loc().?.end) { + var end = lookahead_context.loc().?.end; + while (end < handle.text.len and (std.ascii.isAlphanumeric(handle.text[end]) or handle.text[end] == '"')) { + end += 1; + } + + const replaceLoc = offsets.Loc{ .start = lookahead_context.loc().?.start, .end = end }; + const replaceRange = offsets.locToRange(handle.text, replaceLoc, server.offset_encoding); + + for (completions) |*item| { + item.textEdit = .{ + .TextEdit = .{ + .newText = item.insertText orelse item.label, + .range = replaceRange, + }, + }; + } + } + // truncate completions for (completions) |*item| { if (item.detail) |det| { From ed9cf06d5faaecc017fdc2e75867c54e9209244b Mon Sep 17 00:00:00 2001 From: Auguste Rame <19855629+SuperAuguste@users.noreply.github.com> Date: Mon, 23 Jan 2023 00:23:21 -0500 Subject: [PATCH 13/15] Properly refer to external repositories instead of lumping everything into zls (#936) --- .github/workflows/fuzz.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index be32b94..07eae8a 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -27,7 +27,7 @@ permissions: read-all jobs: fuzz: - if: github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'pr:fuzz') + if: github.repository_owner == 'zigtools' && (github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'pr:fuzz')) runs-on: ubuntu-latest steps: @@ -109,7 +109,7 @@ jobs: space_name: fuzzing-output space_region: nyc3 source: sus/saved_logs/ - out_dir: ${{ github.repository_owner }}/${{ github.event.repository.name }}/${{ github.head_ref || github.ref_name }}/${{ github.sha }} + out_dir: ${{ github.event.pull_request.head.repo.full_name || github.repository }}/${{ github.head_ref || github.ref_name }}/${{ github.event.pull_request.head.sha || github.sha }} \ No newline at end of file From d2586f79a1b2a4268842c60ca1a658a60be38e5c Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Tue, 24 Jan 2023 21:43:48 +0100 Subject: [PATCH 14/15] replace ArrayList with MultiArrayList in DocumentScope (#938) --- src/analysis.zig | 401 +++++++++++++++++++++++++-------------------- src/debug.zig | 6 +- src/references.zig | 6 +- 3 files changed, 228 insertions(+), 185 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index cf97fe0..040411e 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -975,8 +975,8 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl }; const new_handle = store.getOrLoadHandle(builtin_uri) orelse return null; - const root_scope = new_handle.document_scope.scopes.items[0]; - const decl = root_scope.decls.get("Type") orelse return null; + const root_scope_decls = new_handle.document_scope.scopes.items(.decls)[0]; + const decl = root_scope_decls.get("Type") orelse return null; if (decl != .ast_node) return null; const var_decl = ast.varDecl(new_handle.tree, decl.ast_node) orelse return null; @@ -2089,26 +2089,27 @@ pub const DeclWithHandle = struct { if (!switch_expr_type.isUnionType()) return null; - if (node_tags[pay.items[0]] == .enum_literal) { - const scope = findContainerScope(.{ .node 
= switch_expr_type.type.data.other, .handle = switch_expr_type.handle }) orelse return null; - if (scope.decls.getEntry(tree.tokenSlice(main_tokens[pay.items[0]]))) |candidate| { - switch (candidate.value_ptr.*) { - .ast_node => |node| { - if (ast.containerField(switch_expr_type.handle.tree, node)) |container_field| { - if (container_field.ast.type_expr != 0) { - return ((try resolveTypeOfNodeInternal( - store, - arena, - .{ .node = container_field.ast.type_expr, .handle = switch_expr_type.handle }, - bound_type_params, - )) orelse return null).instanceTypeVal(); - } - } - }, - else => {}, + if (node_tags[pay.items[0]] != .enum_literal) return null; + + const scope_index = findContainerScopeIndex(.{ .node = switch_expr_type.type.data.other, .handle = switch_expr_type.handle }) orelse return null; + const scope_decls = switch_expr_type.handle.document_scope.scopes.items(.decls); + + const candidate = scope_decls[scope_index].getEntry(tree.tokenSlice(main_tokens[pay.items[0]])) orelse return null; + + switch (candidate.value_ptr.*) { + .ast_node => |node| { + if (ast.containerField(switch_expr_type.handle.tree, node)) |container_field| { + if (container_field.ast.type_expr != 0) { + return ((try resolveTypeOfNodeInternal( + store, + arena, + .{ .node = container_field.ast.type_expr, .handle = switch_expr_type.handle }, + bound_type_params, + )) orelse return null).instanceTypeVal(); + } } - return null; - } + }, + else => {}, } return null; }, @@ -2117,17 +2118,16 @@ pub const DeclWithHandle = struct { } }; -fn findContainerScope(container_handle: NodeWithHandle) ?*Scope { +fn findContainerScopeIndex(container_handle: NodeWithHandle) ?usize { const container = container_handle.node; const handle = container_handle.handle; if (!ast.isContainer(handle.tree, container)) return null; - // Find the container scope. 
- return for (handle.document_scope.scopes.items) |*scope| { - switch (scope.data) { + return for (handle.document_scope.scopes.items(.data)) |data, scope_index| { + switch (data) { .container => |node| if (node == container) { - break scope; + break scope_index; }, else => {}, } @@ -2145,9 +2145,11 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena const is_enum = token_tags[main_token] == .keyword_enum; - const container_scope = findContainerScope(container_handle) orelse return; + const scope_decls = handle.document_scope.scopes.items(.decls); + const scope_uses = handle.document_scope.scopes.items(.uses); + const container_scope_index = findContainerScopeIndex(container_handle) orelse return; - var decl_it = container_scope.decls.iterator(); + var decl_it = scope_decls[container_scope_index].iterator(); while (decl_it.next()) |entry| { switch (entry.value_ptr.*) { .ast_node => |node| { @@ -2171,7 +2173,7 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena try callback(context, decl); } - for (container_scope.uses.items) |use| { + for (scope_uses[container_scope_index].items) |use| { const use_token = tree.nodes.items(.main_token)[use]; const is_pub = use_token > 0 and token_tags[use_token - 1] == .keyword_pub; if (handle != orig_handle and !is_pub) continue; @@ -2201,65 +2203,95 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena } } +pub const EnclosingScopeIterator = struct { + scope_locs: []offsets.Loc, + current_scope: usize, + source_index: usize, + + pub fn next(self: *EnclosingScopeIterator) ?usize { + while (self.current_scope < self.scope_locs.len) : (self.current_scope += 1) { + const scope_loc = self.scope_locs[self.current_scope]; + + if (self.source_index >= scope_loc.start and self.source_index <= scope_loc.end) { + defer self.current_scope += 1; + return self.current_scope; + } + if (scope_loc.start >= self.source_index) { + self.current_scope = self.scope_locs.len; + return null; + } + } + return null; + } +}; + +pub fn iterateEnclosingScopes(document_scope: DocumentScope, source_index: usize) EnclosingScopeIterator { + return .{ + .scope_locs = document_scope.scopes.items(.loc), + .current_scope = 0, + .source_index = source_index, + }; +} + pub fn iterateSymbolsContainer(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *const DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool) error{OutOfMemory}!void { var use_trail = std.ArrayList(Ast.Node.Index).init(arena.allocator()); return try iterateSymbolsContainerInternal(store, arena, container_handle, orig_handle, callback, context, instance_access, &use_trail); } pub fn iterateLabels(handle: *const DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void { - for (handle.document_scope.scopes.items) |scope| { - if (source_index >= scope.loc.start and source_index < scope.loc.end) { - var decl_it = scope.decls.iterator(); - while (decl_it.next()) |entry| { - switch (entry.value_ptr.*) { - .label_decl => {}, - else => continue, - } - try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle }); + const scope_decls = handle.document_scope.scopes.items(.decls); + + var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index); + while (scope_iterator.next()) |scope_index| { + var decl_it = scope_decls[scope_index].iterator(); + while 
(decl_it.next()) |entry| { + switch (entry.value_ptr.*) { + .label_decl => {}, + else => continue, } + try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle }); } - if (scope.loc.start >= source_index) return; } } fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype, use_trail: *std.ArrayList(Ast.Node.Index)) error{OutOfMemory}!void { - for (handle.document_scope.scopes.items) |scope| { - if (source_index >= scope.loc.start and source_index <= scope.loc.end) { - var decl_it = scope.decls.iterator(); - while (decl_it.next()) |entry| { - if (entry.value_ptr.* == .ast_node and - handle.tree.nodes.items(.tag)[entry.value_ptr.*.ast_node].isContainerField()) continue; - if (entry.value_ptr.* == .label_decl) continue; - try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle }); - } + const scope_decls = handle.document_scope.scopes.items(.decls); + const scope_uses = handle.document_scope.scopes.items(.uses); - for (scope.uses.items) |use| { - if (std.mem.indexOfScalar(Ast.Node.Index, use_trail.items, use) != null) continue; - try use_trail.append(use); - - const use_expr = (try resolveTypeOfNode( - store, - arena, - .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle }, - )) orelse continue; - const use_expr_node = switch (use_expr.type.data) { - .other => |n| n, - else => continue, - }; - try iterateSymbolsContainerInternal( - store, - arena, - .{ .node = use_expr_node, .handle = use_expr.handle }, - handle, - callback, - context, - false, - use_trail, - ); - } + var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index); + while (scope_iterator.next()) |scope_index| { + var decl_it = scope_decls[scope_index].iterator(); + while (decl_it.next()) |entry| { + if (entry.value_ptr.* == .ast_node and + handle.tree.nodes.items(.tag)[entry.value_ptr.*.ast_node].isContainerField()) continue; + if (entry.value_ptr.* == .label_decl) continue; + try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle }); } - if (scope.loc.start >= source_index) return; + for (scope_uses[scope_index].items) |use| { + if (std.mem.indexOfScalar(Ast.Node.Index, use_trail.items, use) != null) continue; + try use_trail.append(use); + + const use_expr = (try resolveTypeOfNode( + store, + arena, + .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle }, + )) orelse continue; + const use_expr_node = switch (use_expr.type.data) { + .other => |n| n, + else => continue, + }; + try iterateSymbolsContainerInternal( + store, + arena, + .{ .node = use_expr_node, .handle = use_expr.handle }, + handle, + callback, + context, + false, + use_trail, + ); + } } } @@ -2269,42 +2301,50 @@ pub fn iterateSymbolsGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocat } pub fn innermostBlockScopeIndex(handle: DocumentStore.Handle, source_index: usize) usize { - if (handle.document_scope.scopes.items.len == 1) return 0; + if (handle.document_scope.scopes.len == 1) return 0; - var current: usize = 0; - for (handle.document_scope.scopes.items[1..]) |*scope, idx| { - if (source_index >= scope.loc.start and source_index <= scope.loc.end) { - switch (scope.data) { - .container, .function, .block => current = idx + 1, + const scope_locs = handle.document_scope.scopes.items(.loc); + const scope_datas = handle.document_scope.scopes.items(.data); + + var innermost: usize = 0; + var 
scope_index: usize = 1; + while (scope_index < handle.document_scope.scopes.len) : (scope_index += 1) { + const scope_loc = scope_locs[scope_index]; + if (source_index >= scope_loc.start and source_index <= scope_loc.end) { + switch (scope_datas[scope_index]) { + .container, .function, .block => innermost = scope_index, else => {}, } } - if (scope.loc.start > source_index) break; + if (scope_loc.start > source_index) break; } - return current; + return innermost; } pub fn innermostBlockScope(handle: DocumentStore.Handle, source_index: usize) Ast.Node.Index { - return handle.document_scope.scopes.items[innermostBlockScopeIndex(handle, source_index)].toNodeIndex().?; + const scope_index = innermostBlockScopeIndex(handle, source_index); + return handle.document_scope.scopes.items(.data)[scope_index].toNodeIndex().?; } pub fn innermostContainer(handle: *const DocumentStore.Handle, source_index: usize) TypeWithHandle { - var current = handle.document_scope.scopes.items[0].data.container; - if (handle.document_scope.scopes.items.len == 1) return TypeWithHandle.typeVal(.{ .node = current, .handle = handle }); + const scope_datas = handle.document_scope.scopes.items(.data); - for (handle.document_scope.scopes.items[1..]) |scope| { - if (source_index >= scope.loc.start and source_index <= scope.loc.end) { - switch (scope.data) { - .container => |node| current = node, - else => {}, - } + var current = scope_datas[0].container; + if (handle.document_scope.scopes.len == 1) return TypeWithHandle.typeVal(.{ .node = current, .handle = handle }); + + var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index); + while (scope_iterator.next()) |scope_index| { + switch (scope_datas[scope_index]) { + .container => |node| current = node, + else => {}, } - if (scope.loc.start > source_index) break; } return TypeWithHandle.typeVal(.{ .node = current, .handle = handle }); } fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []const Ast.Node.Index, symbol: []const u8, handle: *const DocumentStore.Handle) error{OutOfMemory}!?DeclWithHandle { + if (uses.len == 0) return null; + // If we were asked to resolve this symbol before, // it is self-referential and we cannot resolve it. 
if (std.mem.indexOfScalar([*]const u8, using_trail.items, symbol.ptr) != null) @@ -2337,20 +2377,20 @@ fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []co } pub fn lookupLabel(handle: *const DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle { - for (handle.document_scope.scopes.items) |scope| { - if (source_index >= scope.loc.start and source_index < scope.loc.end) { - if (scope.decls.getEntry(symbol)) |candidate| { - switch (candidate.value_ptr.*) { - .label_decl => {}, - else => continue, - } - return DeclWithHandle{ - .decl = candidate.value_ptr, - .handle = handle, - }; - } + const scope_decls = handle.document_scope.scopes.items(.decls); + + var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index); + while (scope_iterator.next()) |scope_index| { + const candidate = scope_decls[scope_index].getEntry(symbol) orelse continue; + + switch (candidate.value_ptr.*) { + .label_decl => {}, + else => continue, } - if (scope.loc.start > source_index) return null; + return DeclWithHandle{ + .decl = candidate.value_ptr, + .handle = handle, + }; } return null; } @@ -2358,11 +2398,15 @@ pub fn lookupLabel(handle: *const DocumentStore.Handle, symbol: []const u8, sour pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle { const innermost_scope_idx = innermostBlockScopeIndex(handle.*, source_index); + const scope_locs = handle.document_scope.scopes.items(.loc); + const scope_decls = handle.document_scope.scopes.items(.decls); + const scope_uses = handle.document_scope.scopes.items(.uses); + var curr = innermost_scope_idx; while (curr >= 0) : (curr -= 1) { - const scope = &handle.document_scope.scopes.items[curr]; - if (source_index >= scope.loc.start and source_index <= scope.loc.end) blk: { - if (scope.decls.getEntry(symbol)) |candidate| { + const scope_loc = scope_locs[curr]; + if (source_index >= scope_loc.start and source_index <= scope_loc.end) blk: { + if (scope_decls[curr].getEntry(symbol)) |candidate| { switch (candidate.value_ptr.*) { .ast_node => |node| { if (handle.tree.nodes.items(.tag)[node].isContainerField()) break :blk; @@ -2375,7 +2419,7 @@ pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator .handle = handle, }; } - if (try resolveUse(store, arena, scope.uses.items, symbol, handle)) |result| return result; + if (try resolveUse(store, arena, scope_uses[curr].items, symbol, handle)) |result| return result; } if (curr == 0) break; } @@ -2399,9 +2443,11 @@ pub fn lookupSymbolContainer( const main_token = tree.nodes.items(.main_token)[container]; const is_enum = token_tags[main_token] == .keyword_enum; + const scope_decls = handle.document_scope.scopes.items(.decls); + const scope_uses = handle.document_scope.scopes.items(.uses); - if (findContainerScope(container_handle)) |container_scope| { - if (container_scope.decls.getEntry(symbol)) |candidate| { + if (findContainerScopeIndex(container_handle)) |container_scope_index| { + if (scope_decls[container_scope_index].getEntry(symbol)) |candidate| { switch (candidate.value_ptr.*) { .ast_node => |node| { if (node_tags[node].isContainerField()) { @@ -2415,8 +2461,7 @@ pub fn lookupSymbolContainer( return DeclWithHandle{ .decl = candidate.value_ptr, .handle = handle }; } - if (try resolveUse(store, arena, container_scope.uses.items, symbol, handle)) |result| return result; - 
return null; + if (try resolveUse(store, arena, scope_uses[container_scope_index].items, symbol, handle)) |result| return result; } return null; @@ -2446,15 +2491,19 @@ comptime { } pub const DocumentScope = struct { - scopes: std.ArrayListUnmanaged(Scope), + scopes: std.MultiArrayList(Scope), error_completions: CompletionSet, enum_completions: CompletionSet, pub fn deinit(self: *DocumentScope, allocator: std.mem.Allocator) void { - for (self.scopes.items) |*scope| { - scope.deinit(allocator); + var i: usize = 0; + while (i < self.scopes.len) : (i += 1) { + self.scopes.items(.decls)[i].deinit(allocator); + self.scopes.items(.tests)[i].deinit(allocator); + self.scopes.items(.uses)[i].deinit(allocator); } self.scopes.deinit(allocator); + for (self.error_completions.entries.items(.key)) |item| { if (item.detail) |detail| allocator.free(detail); switch (item.documentation orelse continue) { @@ -2480,6 +2529,13 @@ pub const Scope = struct { function: Ast.Node.Index, // .tag is FnProto block: Ast.Node.Index, // .tag is Block other, + + pub fn toNodeIndex(self: Data) ?Ast.Node.Index { + return switch (self) { + .container, .function, .block => |idx| idx, + else => null, + }; + } }; loc: offsets.Loc, @@ -2487,19 +2543,6 @@ pub const Scope = struct { tests: std.ArrayListUnmanaged(Ast.Node.Index) = .{}, uses: std.ArrayListUnmanaged(Ast.Node.Index) = .{}, data: Data, - - pub fn deinit(self: *Scope, allocator: std.mem.Allocator) void { - self.decls.deinit(allocator); - self.tests.deinit(allocator); - self.uses.deinit(allocator); - } - - pub fn toNodeIndex(self: Scope) ?Ast.Node.Index { - return switch (self.data) { - .container, .function, .block => |idx| idx, - else => null, - }; - } }; pub fn makeDocumentScope(allocator: std.mem.Allocator, tree: Ast) !DocumentScope { @@ -2523,7 +2566,7 @@ pub fn makeDocumentScope(allocator: std.mem.Allocator, tree: Ast) !DocumentScope } const ScopeContext = struct { - scopes: *std.ArrayListUnmanaged(Scope), + scopes: *std.MultiArrayList(Scope), enums: *CompletionSet, errors: *CompletionSet, tree: Ast, @@ -2538,12 +2581,11 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: const main_tokens = tree.nodes.items(.main_token); const node_tag = tags[node_idx]; - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = offsets.nodeToLoc(tree, node_idx), .data = .{ .container = node_idx }, - }; - const scope_idx = scopes.items.len - 1; + }); + const scope_index = scopes.len - 1; if (node_tag == .error_set_decl) { // All identifiers in main_token..data.lhs are error fields. @@ -2551,7 +2593,7 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: while (i < data[node_idx].rhs) : (i += 1) { if (token_tags[i] == .identifier) { const name = offsets.tokenToSlice(tree, i); - if (try scopes.items[scope_idx].decls.fetchPut(allocator, name, .{ .error_token = i })) |_| { + if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .error_token = i })) |_| { // TODO Record a redefinition error. 
} const gop = try context.errors.getOrPut(allocator, .{ @@ -2572,7 +2614,7 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: const ast_decls = ast.declMembers(tree, node_idx, &buf); for (ast_decls) |decl| { if (tags[decl] == .@"usingnamespace") { - try scopes.items[scope_idx].uses.append(allocator, decl); + try scopes.items(.uses)[scope_index].append(allocator, decl); continue; } @@ -2580,10 +2622,10 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: const name = getDeclName(tree, decl) orelse continue; if (tags[decl] == .test_decl) { - try scopes.items[scope_idx].tests.append(allocator, decl); + try scopes.items(.tests)[scope_index].append(allocator, decl); continue; } - if (try scopes.items[scope_idx].decls.fetchPut(allocator, name, .{ .ast_node = decl })) |existing| { + if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .ast_node = decl })) |existing| { _ = existing; // TODO Record a redefinition error. } @@ -2663,13 +2705,13 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .loc = offsets.nodeToLoc(tree, node_idx), .data = .{ .function = node_idx }, }); - const scope_idx = scopes.items.len - 1; + const scope_index = scopes.len - 1; var it = func.iterate(&tree); while (ast.nextFnParam(&it)) |param| { // Add parameter decls if (param.name_token) |name_token| { - if (try scopes.items[scope_idx].decls.fetchPut( + if (try scopes.items(.decls)[scope_index].fetchPut( allocator, tree.tokenSlice(name_token), .{ .param_payload = .{ .param = param, .func = node_idx } }, @@ -2707,15 +2749,15 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i // if labeled block if (token_tags[first_token] == .identifier) { - const scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, main_tokens[node_idx]), .end = offsets.tokenToLoc(tree, last_token).start, }, .data = .other, - }; - try scope.decls.putNoClobber(allocator, tree.tokenSlice(first_token), .{ .label_decl = .{ + }); + const scope_index = scopes.len - 1; + try scopes.items(.decls)[scope_index].putNoClobber(allocator, tree.tokenSlice(first_token), .{ .label_decl = .{ .label = first_token, .block = node_idx, } }); @@ -2725,21 +2767,21 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .loc = offsets.nodeToLoc(tree, node_idx), .data = .{ .container = node_idx }, }); - const scope_idx = scopes.items.len - 1; + const scope_index = scopes.len - 1; var buffer: [2]Ast.Node.Index = undefined; const statements = ast.blockStatements(tree, node_idx, &buffer).?; for (statements) |idx| { if (tags[idx] == .@"usingnamespace") { - try scopes.items[scope_idx].uses.append(allocator, idx); + try scopes.items(.uses)[scope_index].append(allocator, idx); continue; } try makeScopeInternal(allocator, context, idx); if (ast.varDecl(tree, idx)) |var_decl| { const name = tree.tokenSlice(var_decl.ast.mut_token + 1); - if (try scopes.items[scope_idx].decls.fetchPut(allocator, name, .{ .ast_node = idx })) |existing| { + if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .ast_node = idx })) |existing| { _ = existing; // TODO record a redefinition error. 
} @@ -2754,20 +2796,20 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i const if_node = ast.ifFull(tree, node_idx); if (if_node.payload_token) |payload| { - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, payload), .end = offsets.tokenToLoc(tree, ast.lastToken(tree, if_node.ast.then_expr)).end, }, .data = .other, - }; + }); + const scope_index = scopes.len - 1; const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); std.debug.assert(token_tags[name_token] == .identifier); const name = tree.tokenSlice(name_token); - try scope.decls.putNoClobber(allocator, name, .{ + try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .pointer_payload = .{ .name = name_token, .condition = if_node.ast.cond_expr, @@ -2780,17 +2822,17 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i if (if_node.ast.else_expr != 0) { if (if_node.error_token) |err_token| { std.debug.assert(token_tags[err_token] == .identifier); - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, err_token), .end = offsets.tokenToLoc(tree, ast.lastToken(tree, if_node.ast.else_expr)).end, }, .data = .other, - }; + }); + const scope_index = scopes.len - 1; const name = tree.tokenSlice(err_token); - try scope.decls.putNoClobber(allocator, name, .{ .ast_node = if_node.ast.else_expr }); + try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .ast_node = if_node.ast.else_expr }); } try makeScopeInternal(allocator, context, if_node.ast.else_expr); } @@ -2801,21 +2843,21 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i const catch_token = main_tokens[node_idx]; const catch_expr = data[node_idx].rhs; - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, tree.firstToken(catch_expr)), .end = offsets.tokenToLoc(tree, ast.lastToken(tree, catch_expr)).end, }, .data = .other, - }; + }); + const scope_index = scopes.len - 1; if (token_tags.len > catch_token + 2 and token_tags[catch_token + 1] == .pipe and token_tags[catch_token + 2] == .identifier) { const name = tree.tokenSlice(catch_token + 2); - try scope.decls.putNoClobber(allocator, name, .{ .ast_node = catch_expr }); + try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .ast_node = catch_expr }); } try makeScopeInternal(allocator, context, catch_expr); }, @@ -2830,36 +2872,36 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i if (while_node.label_token) |label| { std.debug.assert(token_tags[label] == .identifier); - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, while_node.ast.while_token), .end = offsets.tokenToLoc(tree, ast.lastToken(tree, node_idx)).end, }, .data = .other, - }; + }); + const scope_index = scopes.len - 1; - try scope.decls.putNoClobber(allocator, tree.tokenSlice(label), .{ .label_decl = .{ + try scopes.items(.decls)[scope_index].putNoClobber(allocator, tree.tokenSlice(label), .{ .label_decl = .{ .label = label, .block = while_node.ast.then_expr, } }); } if (while_node.payload_token) |payload| { - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, 
payload), .end = offsets.tokenToLoc(tree, ast.lastToken(tree, while_node.ast.then_expr)).end, }, .data = .other, - }; + }); + const scope_index = scopes.len - 1; const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); std.debug.assert(token_tags[name_token] == .identifier); const name = tree.tokenSlice(name_token); - try scope.decls.putNoClobber(allocator, name, if (is_for) .{ + try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, if (is_for) .{ .array_payload = .{ .identifier = name_token, .array_expr = while_node.ast.cond_expr, @@ -2875,7 +2917,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i if (token_tags[name_token + 1] == .comma) { const index_token = name_token + 2; std.debug.assert(token_tags[index_token] == .identifier); - if (try scope.decls.fetchPut( + if (try scopes.items(.decls)[scope_index].fetchPut( allocator, tree.tokenSlice(index_token), .{ .array_index = index_token }, @@ -2890,17 +2932,17 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i if (while_node.ast.else_expr != 0) { if (while_node.error_token) |err_token| { std.debug.assert(token_tags[err_token] == .identifier); - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, err_token), .end = offsets.tokenToLoc(tree, ast.lastToken(tree, while_node.ast.else_expr)).end, }, .data = .other, - }; + }); + const scope_index = scopes.len - 1; const name = tree.tokenSlice(err_token); - try scope.decls.putNoClobber(allocator, name, .{ .ast_node = while_node.ast.else_expr }); + try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .ast_node = while_node.ast.else_expr }); } try makeScopeInternal(allocator, context, while_node.ast.else_expr); } @@ -2920,20 +2962,20 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i }; if (switch_case.payload_token) |payload| { - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, payload), .end = offsets.tokenToLoc(tree, ast.lastToken(tree, switch_case.ast.target_expr)).end, }, .data = .other, - }; + }); + const scope_index = scopes.len - 1; // if payload is *name than get next token const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); const name = tree.tokenSlice(name_token); - try scope.decls.putNoClobber(allocator, name, .{ + try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .switch_payload = .{ .node = name_token, .switch_expr = cond, @@ -3084,18 +3126,17 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i const expr = data[node_idx].rhs; if (data[node_idx].lhs != 0) { const payload_token = data[node_idx].lhs; - var scope = try scopes.addOne(allocator); - scope.* = .{ + try scopes.append(allocator, .{ .loc = .{ .start = offsets.tokenToIndex(tree, payload_token), .end = offsets.tokenToLoc(tree, ast.lastToken(tree, expr)).end, }, .data = .other, - }; - errdefer scope.decls.deinit(allocator); + }); + const scope_index = scopes.len - 1; const name = tree.tokenSlice(payload_token); - try scope.decls.putNoClobber(allocator, name, .{ .ast_node = expr }); + try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .ast_node = expr }); } try makeScopeInternal(allocator, context, expr); diff --git a/src/debug.zig b/src/debug.zig index 02a1b38..d5fecd6 100644 --- a/src/debug.zig +++ b/src/debug.zig @@ 
-43,8 +43,10 @@ pub fn printTree(tree: std.zig.Ast) void { pub fn printDocumentScope(doc_scope: analysis.DocumentScope) void { if (!std.debug.runtime_safety) @compileError("this function should only be used in debug mode!"); - for (doc_scope.scopes.items) |scope, i| { - if (i != 0) std.debug.print("\n\n", .{}); + var index: usize = 0; + while(index < doc_scope.scopes.len) : (index += 1) { + const scope = doc_scope.scopes.get(index); + if (index != 0) std.debug.print("\n\n", .{}); std.debug.print( \\[{d}, {d}] {} \\usingnamespaces: {d} diff --git a/src/references.zig b/src/references.zig index e508ea9..9bc24b0 100644 --- a/src/references.zig +++ b/src/references.zig @@ -514,10 +514,10 @@ pub fn symbolReferences( .param_payload => |pay| blk: { // Rename the param tok. const param = pay.param; - for (curr_handle.document_scope.scopes.items) |scope| { - if (scope.data != .function) continue; + for (curr_handle.document_scope.scopes.items(.data)) |scope_data| { + if (scope_data != .function) continue; - const proto = scope.data.function; + const proto = scope_data.function; var buf: [1]Ast.Node.Index = undefined; const fn_proto = ast.fnProto(curr_handle.tree, proto, &buf).?; From fe54fb7cfac53ad13cc4fb71479106d26cc90564 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Wed, 25 Jan 2023 00:55:38 +0100 Subject: [PATCH 15/15] Use new ast helper functions (#939) * use new ast helper functions * use new ast helper functions * update min build version * fix merge conflicts --- build.zig | 2 +- src/ComptimeInterpreter.zig | 138 ++++++++++-------------- src/Server.zig | 28 ++--- src/analysis.zig | 196 +++++++++++++++------------------ src/ast.zig | 210 +++++++++++------------------------- src/inlay_hints.zig | 50 +++------ src/references.zig | 60 ++++------- src/semantic_tokens.zig | 55 +++------- src/signature_help.zig | 4 +- src/translate_c.zig | 4 +- 10 files changed, 272 insertions(+), 475 deletions(-) diff --git a/build.zig b/build.zig index e341cbe..9010aa7 100644 --- a/build.zig +++ b/build.zig @@ -7,7 +7,7 @@ const zls_version = std.builtin.Version{ .major = 0, .minor = 11, .patch = 0 }; pub fn build(b: *std.build.Builder) !void { comptime { const current_zig = builtin.zig_version; - const min_zig = std.SemanticVersion.parse("0.11.0-dev.874+40ed6ae84") catch return; // Changes to builtin.Type API + const min_zig = std.SemanticVersion.parse("0.11.0-dev.1254+1f8f79cd5") catch return; // add helper functions to std.zig.Ast if (current_zig.order(min_zig) == .lt) { @compileError(std.fmt.comptimePrint("Your Zig version v{} does not meet the minimum build requirement of v{}", .{ current_zig, min_zig })); } diff --git a/src/ComptimeInterpreter.zig b/src/ComptimeInterpreter.zig index 55104c0..bde02db 100644 --- a/src/ComptimeInterpreter.zig +++ b/src/ComptimeInterpreter.zig @@ -325,7 +325,7 @@ pub const Declaration = struct { .aligned_var_decl, .simple_var_decl, => { - const var_decl = ast.varDecl(tree, decl.node_idx).?; + const var_decl = tree.fullVarDecl(decl.node_idx).?; if (var_decl.ast.init_node == 0) return error.CriticalAstFailure; @@ -361,7 +361,7 @@ pub const Declaration = struct { .aligned_var_decl, .simple_var_decl, => { - return tree.tokenSlice(ast.varDecl(tree, declaration.node_idx).?.ast.mut_token).len != 3; + return tree.tokenSlice(tree.fullVarDecl(declaration.node_idx).?.ast.mut_token).len != 3; }, else => false, }; @@ -496,24 +496,7 @@ pub const InterpreterScope = struct { pub const ScopeKind = enum { container, block, function }; pub fn 
scopeKind(scope: InterpreterScope) ScopeKind { const tree = scope.interpreter.getHandle().tree; - return switch (tree.nodes.items(.tag)[scope.node_idx]) { - .container_decl, - .container_decl_trailing, - .container_decl_arg, - .container_decl_arg_trailing, - .container_decl_two, - .container_decl_two_trailing, - .tagged_union, - .tagged_union_trailing, - .tagged_union_two, - .tagged_union_two_trailing, - .tagged_union_enum_tag, - .tagged_union_enum_tag_trailing, - .root, - .error_set_decl, - => .container, - else => .block, - }; + return if (ast.isContainer(tree, scope.node_idx)) .container else .block; } pub fn getLabel(scope: InterpreterScope) ?Ast.TokenIndex { @@ -601,11 +584,10 @@ pub const InterpretResult = union(enum) { fn getDeclCount(tree: Ast, node_idx: Ast.Node.Index) usize { var buffer: [2]Ast.Node.Index = undefined; - const members = ast.declMembers(tree, node_idx, &buffer); + const container_decl = tree.fullContainerDecl(&buffer, node_idx).?; var count: usize = 0; - - for (members) |member| { + for (container_decl.ast.members) |member| { switch (tree.nodes.items(.tag)[member]) { .global_var_decl, .local_var_decl, @@ -638,9 +620,9 @@ pub fn huntItDown( log.info("Order-independent evaluating {s}...", .{decl_name}); var buffer: [2]Ast.Node.Index = undefined; - const members = ast.declMembers(tree, pscope.node_idx, &buffer); + const container_decl = tree.fullContainerDecl(&buffer, pscope.node_idx).?; - for (members) |member| { + for (container_decl.ast.members) |member| { switch (tags[member]) { .global_var_decl, .local_var_decl, @@ -751,7 +733,6 @@ pub fn interpret( // .tagged_union_enum_tag, // .tagged_union_enum_tag_trailing, .root, - .error_set_decl, => { var container_scope = try interpreter.newScope(scope, node_idx); var type_info = TypeInfo{ @@ -764,55 +745,47 @@ pub fn interpret( if (node_idx == 0) interpreter.root_type = cont_type; var buffer: [2]Ast.Node.Index = undefined; - const members = ast.declMembers(tree, node_idx, &buffer); + const container_decl = tree.fullContainerDecl(&buffer, node_idx).?; - var field_idx: usize = 0; - for (members) |member| { - const maybe_container_field: ?zig.Ast.full.ContainerField = switch (tags[member]) { - .container_field => tree.containerField(member), - .container_field_align => tree.containerFieldAlign(member), - .container_field_init => tree.containerFieldInit(member), - else => null, + for (container_decl.ast.members) |member| { + const container_field = tree.fullContainerField(member) orelse { + _ = try interpreter.interpret(member, container_scope, options); + continue; }; - if (maybe_container_field) |field_info| { - var init_type_value = try (try interpreter.interpret(field_info.ast.type_expr, container_scope, .{})).getValue(); - var default_value = if (field_info.ast.value_expr == 0) - null - else - try (try interpreter.interpret(field_info.ast.value_expr, container_scope, .{})).getValue(); + var init_type_value = try (try interpreter.interpret(container_field.ast.type_expr, container_scope, .{})).getValue(); + var default_value = if (container_field.ast.value_expr == 0) + null + else + try (try interpreter.interpret(container_field.ast.value_expr, container_scope, .{})).getValue(); - if (init_type_value.type.getTypeInfo() != .type) { - try interpreter.recordError( - field_info.ast.type_expr, - "expected_type", - try std.fmt.allocPrint(interpreter.allocator, "expected type 'type', found '{s}'", .{interpreter.formatTypeInfo(init_type_value.type.getTypeInfo())}), - ); - continue; - } - - const name = if 
(field_info.ast.tuple_like) - &[0]u8{} - else - tree.tokenSlice(field_info.ast.main_token); - const field = FieldDefinition{ - .node_idx = member, - .name = name, - .type = init_type_value.value_data.type, - .default_value = default_value, - // TODO: Default values - // .@"type" = T: { - // var value = (try interpreter.interpret(field_info.ast.type_expr, scope_idx, true)).?.value; - // break :T @ptrCast(*Type, @alignCast(@alignOf(*Type), value)).*; - // }, - // .value = null, - }; - - try cont_type.getTypeInfoMutable().@"struct".fields.put(interpreter.allocator, name, field); - field_idx += 1; - } else { - _ = try interpreter.interpret(member, container_scope, options); + if (init_type_value.type.getTypeInfo() != .type) { + try interpreter.recordError( + container_field.ast.type_expr, + "expected_type", + try std.fmt.allocPrint(interpreter.allocator, "expected type 'type', found '{s}'", .{interpreter.formatTypeInfo(init_type_value.type.getTypeInfo())}), + ); + continue; } + + const name = if (container_field.ast.tuple_like) + &[0]u8{} + else + tree.tokenSlice(container_field.ast.main_token); + const field = FieldDefinition{ + .node_idx = member, + .name = name, + .type = init_type_value.value_data.type, + .default_value = default_value, + // TODO: Default values + // .@"type" = T: { + // var value = (try interpreter.interpret(container_field.ast.type_expr, scope_idx, true)).?.value; + // break :T @ptrCast(*Type, @alignCast(@alignOf(*Type), value)).*; + // }, + // .value = null, + }; + + try cont_type.getTypeInfoMutable().@"struct".fields.put(interpreter.allocator, name, field); } return InterpretResult{ .value = Value{ @@ -822,6 +795,9 @@ pub fn interpret( .value_data = try interpreter.createValueData(.{ .type = cont_type }), } }; }, + .error_set_decl => { + return InterpretResult{ .nothing = {} }; + }, .global_var_decl, .local_var_decl, .aligned_var_decl, @@ -832,7 +808,7 @@ pub fn interpret( if (scope.?.declarations.contains(name)) return InterpretResult{ .nothing = {} }; - const decl = ast.varDecl(tree, node_idx).?; + const decl = tree.fullVarDecl(node_idx).?; if (decl.ast.init_node == 0) return InterpretResult{ .nothing = {} }; @@ -1009,16 +985,18 @@ pub fn interpret( else InterpretResult{ .return_with_value = try (try interpreter.interpret(data[node_idx].lhs, scope, options)).getValue() }; }, - .@"if", .if_simple => { - const iff = ast.ifFull(tree, node_idx); + .@"if", + .if_simple, + => { + const if_node = ast.fullIf(tree, node_idx).?; // TODO: Don't evaluate runtime ifs // if (options.observe_values) { - const ir = try interpreter.interpret(iff.ast.cond_expr, scope, options); + const ir = try interpreter.interpret(if_node.ast.cond_expr, scope, options); if ((try ir.getValue()).value_data.bool) { - return try interpreter.interpret(iff.ast.then_expr, scope, options); + return try interpreter.interpret(if_node.ast.then_expr, scope, options); } else { - if (iff.ast.else_expr != 0) { - return try interpreter.interpret(iff.ast.else_expr, scope, options); + if (if_node.ast.else_expr != 0) { + return try interpreter.interpret(if_node.ast.else_expr, scope, options); } else return InterpretResult{ .nothing = {} }; } }, @@ -1254,7 +1232,7 @@ pub fn interpret( // .fn_proto_simple, .fn_decl => { // var buf: [1]Ast.Node.Index = undefined; - // const func = ast.fnProto(tree, node_idx, &buf).?; + // const func = tree.fullFnProto(node_idx, &buf).?; // TODO: Add params @@ -1315,7 +1293,7 @@ pub fn interpret( .async_call_one_comma, => { var params: [1]Ast.Node.Index = undefined; - const call_full = 
ast.callFull(tree, node_idx, &params) orelse unreachable; + const call_full = tree.fullCall(&params, node_idx) orelse unreachable; var args = try std.ArrayListUnmanaged(Value).initCapacity(interpreter.allocator, call_full.ast.params.len); defer args.deinit(interpreter.allocator); @@ -1433,7 +1411,7 @@ pub fn call( var fn_scope = try interpreter.newScope(scope, func_node_idx); var buf: [1]Ast.Node.Index = undefined; - var proto = ast.fnProto(tree, func_node_idx, &buf).?; + var proto = tree.fullFnProto(&buf, func_node_idx).?; var arg_it = proto.iterate(&tree); var arg_index: usize = 0; diff --git a/src/Server.zig b/src/Server.zig index 2fe67dd..a8969e8 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -273,7 +273,7 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO .fn_decl, => blk: { var buf: [1]Ast.Node.Index = undefined; - const func = ast.fnProto(tree, decl_idx, &buf).?; + const func = tree.fullFnProto(&buf, decl_idx).?; if (func.extern_export_inline_token != null) break :blk; if (func.name_token) |name_token| { @@ -689,7 +689,7 @@ fn nodeToCompletion( .fn_decl, => { var buf: [1]Ast.Node.Index = undefined; - const func = ast.fnProto(tree, node, &buf).?; + const func = tree.fullFnProto(&buf, node).?; if (func.name_token) |name_token| { const use_snippets = server.config.enable_snippets and server.client_capabilities.supports_snippets; const insert_text = if (use_snippets) blk: { @@ -715,7 +715,7 @@ fn nodeToCompletion( .aligned_var_decl, .simple_var_decl, => { - const var_decl = ast.varDecl(tree, node).?; + const var_decl = tree.fullVarDecl(node).?; const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const; if (try analysis.resolveVarDeclAlias(&server.document_store, server.arena, node_handle)) |result| { @@ -740,7 +740,7 @@ fn nodeToCompletion( .container_field_align, .container_field_init, => { - const field = ast.containerField(tree, node).?; + const field = tree.fullContainerField(node).?; try list.append(allocator, .{ .label = handle.tree.tokenSlice(field.ast.main_token), .kind = if (field.ast.tuple_like) .Enum else .Field, @@ -766,7 +766,7 @@ fn nodeToCompletion( .ptr_type_bit_range, .ptr_type_sentinel, => { - const ptr_type = ast.ptrType(tree, node).?; + const ptr_type = ast.fullPtrType(tree, node).?; switch (ptr_type.size) { .One, .C, .Many => if (server.config.operator_completions) { @@ -900,11 +900,11 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO var buf: [1]Ast.Node.Index = undefined; - if (ast.varDecl(tree, node)) |var_decl| { + if (tree.fullVarDecl(node)) |var_decl| { break :def analysis.getVariableSignature(tree, var_decl); - } else if (ast.fnProto(tree, node, &buf)) |fn_proto| { + } else if (tree.fullFnProto(&buf, node)) |fn_proto| { break :def analysis.getFunctionSignature(tree, fn_proto); - } else if (ast.containerField(tree, node)) |field| { + } else if (tree.fullContainerField(node)) |field| { break :def analysis.getContainerFieldSignature(tree, field); } else { break :def analysis.nodeToString(tree, node) orelse return null; @@ -2621,8 +2621,10 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error .root => continue, // only fold the expression pertaining to the if statement, and the else statement, each respectively. // TODO: Should folding multiline condition expressions also be supported? Ditto for the other control flow structures. 
- .@"if", .if_simple => { - const if_full = ast.ifFull(handle.tree, node); + .@"if", + .if_simple, + => { + const if_full = ast.fullIf(handle.tree, node).?; const start_tok_1 = ast.lastToken(handle.tree, if_full.ast.cond_expr); const end_tok_1 = ast.lastToken(handle.tree, if_full.ast.then_expr); @@ -2643,7 +2645,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error .while_cont, .while_simple, => { - const loop_full = ast.whileAst(handle.tree, node).?; + const loop_full = ast.fullWhile(handle.tree, node).?; const start_tok_1 = ast.lastToken(handle.tree, loop_full.ast.cond_expr); const end_tok_1 = ast.lastToken(handle.tree, loop_full.ast.then_expr); @@ -2685,8 +2687,8 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error } // Function prototype folding regions - var fn_proto_buffer: [1]Node.Index = undefined; - const fn_proto = ast.fnProto(handle.tree, node, fn_proto_buffer[0..]) orelse + var buffer: [1]Node.Index = undefined; + const fn_proto = handle.tree.fullFnProto(&buffer, node) orelse break :decl_node_blk; const list_start_tok: Ast.TokenIndex = fn_proto.lparen; diff --git a/src/analysis.zig b/src/analysis.zig index 040411e..c21d0de 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -194,7 +194,6 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt if (param.type_expr == 0) return false; const token_starts = tree.tokens.items(.start); - const token_data = tree.nodes.items(.data); const in_container = innermostContainer(handle, token_starts[func.ast.fn_token]); if (try resolveTypeOfNode(document_store, arena, .{ @@ -205,9 +204,9 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt return true; } - if (ast.isPtrType(tree, param.type_expr)) { + if (ast.fullPtrType(tree, param.type_expr)) |ptr_type| { if (try resolveTypeOfNode(document_store, arena, .{ - .node = token_data[param.type_expr].rhs, + .node = ptr_type.ast.child_type, .handle = handle, })) |resolved_prefix_op| { if (std.meta.eql(in_container, resolved_prefix_op)) @@ -280,7 +279,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { .global_var_decl, .simple_var_decl, .aligned_var_decl, - => ast.varDecl(tree, node).?.ast.mut_token + 1, + => tree.fullVarDecl(node).?.ast.mut_token + 1, // function declaration names .fn_proto, .fn_proto_multi, @@ -289,7 +288,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { .fn_decl, => blk: { var params: [1]Ast.Node.Index = undefined; - break :blk ast.fnProto(tree, node, &params).?.name_token; + break :blk tree.fullFnProto(&params, node).?.name_token; }, // containers .container_field, .container_field_init, .container_field_align, => { - const field = ast.containerField(tree, node).?.ast; + const field = tree.fullContainerField(node).?.ast; return field.main_token; }, @@ -381,7 +380,7 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato const token_tags = tree.tokens.items(.tag); const node_tags = tree.nodes.items(.tag); - if (ast.varDecl(handle.tree, decl)) |var_decl| { + if (handle.tree.fullVarDecl(decl)) |var_decl| { if (var_decl.ast.init_node == 0) return null; const base_exp = var_decl.ast.init_node; if (token_tags[var_decl.ast.mut_token] != .keyword_const) return null; @@ -411,8 +410,9 @@ fn findReturnStatementInternal(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.N if (node_tags[child_idx] == .@"return") { if 
(datas[child_idx].lhs != 0) { const lhs = datas[child_idx].lhs; - if (ast.isCall(tree, lhs)) { - const call_name = getDeclName(tree, datas[lhs].lhs); + var buf: [1]Ast.Node.Index = undefined; + if (tree.fullCall(&buf, lhs)) |call| { + const call_name = getDeclName(tree, call.ast.fn_expr); if (call_name) |name| { if (std.mem.eql(u8, name, tree.tokenSlice(fn_decl.name_token.?))) { continue; @@ -528,8 +528,7 @@ fn resolveDerefType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, dere const main_token = tree.nodes.items(.main_token)[deref_node]; const token_tag = tree.tokens.items(.tag)[main_token]; - if (ast.isPtrType(tree, deref_node)) { - const ptr_type = ast.ptrType(tree, deref_node).?; + if (ast.fullPtrType(tree, deref_node)) |ptr_type| { switch (token_tag) { .asterisk => { return ((try resolveTypeOfNodeInternal(store, arena, .{ @@ -566,7 +565,7 @@ fn resolveBracketAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocat .type = .{ .data = .{ .slice = data.rhs }, .is_type_val = false }, .handle = lhs.handle, }; - } else if (ast.ptrType(tree, lhs_node)) |ptr_type| { + } else if (ast.fullPtrType(tree, lhs_node)) |ptr_type| { if (ptr_type.size == .Slice) { if (rhs == .Single) { return ((try resolveTypeOfNodeInternal(store, arena, .{ @@ -647,7 +646,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl .simple_var_decl, .aligned_var_decl, => { - const var_decl = ast.varDecl(tree, node).?; + const var_decl = tree.fullVarDecl(node).?; if (var_decl.ast.type_node != 0) { const decl_type = .{ .node = var_decl.ast.type_node, .handle = handle }; if (try resolveTypeOfNodeInternal(store, arena, decl_type, bound_type_params)) |typ| @@ -679,7 +678,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl switch (child.decl.*) { .ast_node => |n| { if (n == node) return null; - if (ast.varDecl(child.handle.tree, n)) |var_decl| { + if (child.handle.tree.fullVarDecl(n)) |var_decl| { if (var_decl.ast.init_node == node) return null; } @@ -700,7 +699,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl .async_call_one_comma, => { var params: [1]Ast.Node.Index = undefined; - const call = ast.callFull(tree, node, &params) orelse unreachable; + const call = tree.fullCall(&params, node) orelse unreachable; const callee = .{ .node = call.ast.fn_expr, .handle = handle }; const decl = (try resolveTypeOfNodeInternal(store, arena, callee, bound_type_params)) orelse @@ -712,7 +711,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl else => return null, }; var buf: [1]Ast.Node.Index = undefined; - const func_maybe = ast.fnProto(decl.handle.tree, decl_node, &buf); + const func_maybe = decl.handle.tree.fullFnProto(&buf, decl_node); if (func_maybe) |fn_decl| { var expected_params = fn_decl.ast.params.len; @@ -979,7 +978,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl const decl = root_scope_decls.get("Type") orelse return null; if (decl != .ast_node) return null; - const var_decl = ast.varDecl(new_handle.tree, decl.ast_node) orelse return null; + const var_decl = new_handle.tree.fullVarDecl(decl.ast_node) orelse return null; return TypeWithHandle{ .type = .{ @@ -1019,7 +1018,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl => { var buf: [1]Ast.Node.Index = undefined; // This is a function type - if (ast.fnProto(tree, node, &buf).?.name_token == null) { + if (tree.fullFnProto(&buf, node).?.name_token == null) { return 
TypeWithHandle.typeVal(node_handle); } @@ -1107,11 +1106,10 @@ pub const TypeWithHandle = struct { const tree = self.handle.tree; const node = self.type.data.other; const tags = tree.nodes.items(.tag); - if (ast.isContainer(tree, node)) { - var buf: [2]Ast.Node.Index = undefined; - for (ast.declMembers(tree, node, &buf)) |child| { - if (tags[child].isContainerField()) return false; - } + var buf: [2]Ast.Node.Index = undefined; + const full = tree.fullContainerDecl(&buf, node) orelse return true; + for (full.ast.members) |member| { + if (tags[member].isContainerField()) return false; } return true; } @@ -1132,7 +1130,7 @@ pub const TypeWithHandle = struct { var buf: [1]Ast.Node.Index = undefined; const tree = self.handle.tree; return switch (self.type.data) { - .other => |n| if (ast.fnProto(tree, n, &buf)) |fn_proto| blk: { + .other => |n| if (tree.fullFnProto(&buf, n)) |fn_proto| blk: { break :blk isTypeFunction(tree, fn_proto); } else false, else => false, @@ -1143,7 +1141,7 @@ pub const TypeWithHandle = struct { var buf: [1]Ast.Node.Index = undefined; const tree = self.handle.tree; return switch (self.type.data) { - .other => |n| if (ast.fnProto(tree, n, &buf)) |fn_proto| blk: { + .other => |n| if (tree.fullFnProto(&buf, n)) |fn_proto| blk: { break :blk isGenericFunction(tree, fn_proto); } else false, else => false, @@ -1343,7 +1341,7 @@ pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator if (current_type.?.type.is_type_val) return null; const cur_tree = current_type.?.handle.tree; var buf: [1]Ast.Node.Index = undefined; - if (ast.fnProto(cur_tree, current_type_node, &buf)) |func| { + if (cur_tree.fullFnProto(&buf, current_type_node)) |func| { // Check if the function has a body and if so, pass it // so the type can be resolved if it's a generic function returning // an anonymous struct @@ -1408,13 +1406,13 @@ pub fn isNodePublic(tree: Ast, node: Ast.Node.Index) bool { .local_var_decl, .simple_var_decl, .aligned_var_decl, - => ast.varDecl(tree, node).?.visib_token != null, + => tree.fullVarDecl(node).?.visib_token != null, .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl, - => ast.fnProto(tree, node, &buf).?.visib_token != null, + => tree.fullFnProto(&buf, node).?.visib_token != null, else => true, }; } @@ -1428,7 +1426,7 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 { .container_field_init, .container_field_align, => { - const field = ast.containerField(tree, node).?.ast; + const field = tree.fullContainerField(node).?.ast; return if (field.tuple_like) null else tree.tokenSlice(field.main_token); }, .error_value => tree.tokenSlice(data[node].rhs), @@ -1438,7 +1436,7 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 { .fn_proto_one, .fn_proto_simple, .fn_decl, - => if (ast.fnProto(tree, node, &buf).?.name_token) |name| tree.tokenSlice(name) else null, + => if (tree.fullFnProto(&buf, node).?.name_token) |name| tree.tokenSlice(name) else null, .field_access => tree.tokenSlice(data[node].rhs), .call, .call_comma, @@ -1465,16 +1463,16 @@ fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize) pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]const u8 { const node_tags = tree.nodes.items(.tag); + var buf: [2]Ast.Node.Index = undefined; - if (ast.isContainer(tree, node)) { - const decls = ast.declMembers(tree, node, &buf); - for (decls) |decl_idx| { + if (tree.fullContainerDecl(&buf, node)) |container_decl| { + for (container_decl.ast.members) |decl_idx| 
{ if (getImportStr(tree, decl_idx, source_index)) |name| { return name; } } return null; - } else if (ast.varDecl(tree, node)) |var_decl| { + } else if (tree.fullVarDecl(node)) |var_decl| { return getImportStr(tree, var_decl.ast.init_node, source_index); } else if (node_tags[node] == .@"usingnamespace") { return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index); @@ -1868,7 +1866,8 @@ fn addOutlineNodes(allocator: std.mem.Allocator, tree: Ast, child: Ast.Node.Inde .tagged_union_two_trailing, => { var buf: [2]Ast.Node.Index = undefined; - for (ast.declMembers(tree, child, &buf)) |member| + const members = tree.fullContainerDecl(&buf, child).?.ast.members; + for (members) |member| try addOutlineNodes(allocator, tree, member, context); return; }, @@ -1927,20 +1926,18 @@ fn getDocumentSymbolsInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast .encoding = context.encoding, }; - if (ast.isContainer(tree, node)) { - var buf: [2]Ast.Node.Index = undefined; - for (ast.declMembers(tree, node, &buf)) |child| + var buf: [2]Ast.Node.Index = undefined; + if (tree.fullContainerDecl(&buf, node)) |container_decl| { + for (container_decl.ast.members) |child| { try addOutlineNodes(allocator, tree, child, &child_context); - } - - if (ast.varDecl(tree, node)) |var_decl| { + } + } else if (tree.fullVarDecl(node)) |var_decl| { if (var_decl.ast.init_node != 0) try addOutlineNodes(allocator, tree, var_decl.ast.init_node, &child_context); - } - if (tags[node] == .fn_decl) fn_ch: { + } else if (tags[node] == .fn_decl) fn_ch: { const fn_decl = tree.nodes.items(.data)[node]; var params: [1]Ast.Node.Index = undefined; - const fn_proto = ast.fnProto(tree, fn_decl.lhs, &params) orelse break :fn_ch; + const fn_proto = tree.fullFnProto(&params, fn_decl.lhs).?; if (!isTypeFunction(tree, fn_proto)) break :fn_ch; const ret_stmt = findReturnStatement(tree, fn_proto, fn_decl.rhs) orelse break :fn_ch; const type_decl = tree.nodes.items(.data)[ret_stmt].lhs; @@ -2098,7 +2095,7 @@ pub const DeclWithHandle = struct { switch (candidate.value_ptr.*) { .ast_node => |node| { - if (ast.containerField(switch_expr_type.handle.tree, node)) |container_field| { + if (switch_expr_type.handle.tree.fullContainerField(node)) |container_field| { if (container_field.ast.type_expr != 0) { return ((try resolveTypeOfNodeInternal( store, @@ -2576,10 +2573,6 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: const scopes = context.scopes; const tree = context.tree; const tags = tree.nodes.items(.tag); - const token_tags = tree.tokens.items(.tag); - const data = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const node_tag = tags[node_idx]; try scopes.append(allocator, .{ .loc = offsets.nodeToLoc(tree, node_idx), @@ -2587,32 +2580,9 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: }); const scope_index = scopes.len - 1; - if (node_tag == .error_set_decl) { - // All identifiers in main_token..data.lhs are error fields. - var i = main_tokens[node_idx]; - while (i < data[node_idx].rhs) : (i += 1) { - if (token_tags[i] == .identifier) { - const name = offsets.tokenToSlice(tree, i); - if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .error_token = i })) |_| { - // TODO Record a redefinition error. 
- } - const gop = try context.errors.getOrPut(allocator, .{ - .label = name, - .kind = .Constant, - //.detail = - .insertText = name, - .insertTextFormat = .PlainText, - }); - if (!gop.found_existing) { - gop.key_ptr.detail = try std.fmt.allocPrint(allocator, "error.{s}", .{name}); - } - } - } - } - var buf: [2]Ast.Node.Index = undefined; - const ast_decls = ast.declMembers(tree, node_idx, &buf); - for (ast_decls) |decl| { + const container_decl = tree.fullContainerDecl(&buf, node_idx).?; + for (container_decl.ast.members) |decl| { if (tags[decl] == .@"usingnamespace") { try scopes.items(.uses)[scope_index].append(allocator, decl); continue; @@ -2630,9 +2600,6 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: // TODO Record a redefinition error. } - var buffer: [2]Ast.Node.Index = undefined; - const container_decl = ast.containerDecl(tree, node_idx, &buffer) orelse continue; - if (container_decl.ast.enum_token != null) { if (std.mem.eql(u8, name, "_")) return; const Documentation = @TypeOf(@as(types.CompletionItem, undefined).documentation); @@ -2684,10 +2651,37 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .tagged_union_enum_tag, .tagged_union_enum_tag_trailing, .root, - .error_set_decl, => { try makeInnerScope(allocator, context, node_idx); }, + .error_set_decl => { + try scopes.append(allocator, .{ + .loc = offsets.nodeToLoc(tree, node_idx), + .data = .{ .container = node_idx }, + }); + const scope_index = scopes.len - 1; + + // All identifiers in main_token..data.lhs are error fields. + var i = main_tokens[node_idx]; + while (i < data[node_idx].rhs) : (i += 1) { + if (token_tags[i] == .identifier) { + const name = offsets.tokenToSlice(tree, i); + if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .error_token = i })) |_| { + // TODO Record a redefinition error. + } + const gop = try context.errors.getOrPut(allocator, .{ + .label = name, + .kind = .Constant, + //.detail = + .insertText = name, + .insertTextFormat = .PlainText, + }); + if (!gop.found_existing) { + gop.key_ptr.detail = try std.fmt.allocPrint(allocator, "error.{s}", .{name}); + } + } + } + }, .array_type_sentinel => { // TODO: ??? 
return; @@ -2699,7 +2693,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .fn_decl, => |fn_tag| { var buf: [1]Ast.Node.Index = undefined; - const func = ast.fnProto(tree, node_idx, &buf).?; + const func = tree.fullFnProto(&buf, node_idx).?; try scopes.append(allocator, .{ .loc = offsets.nodeToLoc(tree, node_idx), @@ -2779,7 +2773,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i } try makeScopeInternal(allocator, context, idx); - if (ast.varDecl(tree, idx)) |var_decl| { + if (tree.fullVarDecl(idx)) |var_decl| { const name = tree.tokenSlice(var_decl.ast.mut_token + 1); if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .ast_node = idx })) |existing| { _ = existing; @@ -2793,7 +2787,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .@"if", .if_simple, => { - const if_node = ast.ifFull(tree, node_idx); + const if_node = ast.fullIf(tree, node_idx).?; if (if_node.payload_token) |payload| { try scopes.append(allocator, .{ @@ -2867,7 +2861,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .@"for", .for_simple, => { - const while_node = ast.whileAst(tree, node_idx).?; + const while_node = ast.fullWhile(tree, node_idx).?; const is_for = node_tag == .@"for" or node_tag == .for_simple; if (while_node.label_token) |label| { @@ -2955,11 +2949,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i const cases = tree.extra_data[extra.start..extra.end]; for (cases) |case| { - const switch_case: Ast.full.SwitchCase = switch (tags[case]) { - .switch_case => tree.switchCase(case), - .switch_case_one => tree.switchCaseOne(case), - else => continue, - }; + const switch_case: Ast.full.SwitchCase = tree.fullSwitchCase(case).?; if (switch_case.payload_token) |payload| { try scopes.append(allocator, .{ @@ -3000,7 +2990,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .aligned_var_decl, .simple_var_decl, => { - const var_decl = ast.varDecl(tree, node_idx).?; + const var_decl = tree.fullVarDecl(node_idx).?; if (var_decl.ast.type_node != 0) { try makeScopeInternal(allocator, context, var_decl.ast.type_node); } @@ -3019,7 +3009,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .async_call_one_comma, => { var buf: [1]Ast.Node.Index = undefined; - const call = ast.callFull(tree, node_idx, &buf).?; + const call = tree.fullCall(&buf, node_idx).?; try makeScopeInternal(allocator, context, call.ast.fn_expr); for (call.ast.params) |param| @@ -3035,13 +3025,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .struct_init_one_comma, => { var buf: [2]Ast.Node.Index = undefined; - const struct_init: Ast.full.StructInit = switch (node_tag) { - .struct_init, .struct_init_comma => tree.structInit(node_idx), - .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node_idx), - .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node_idx), - .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node_idx), - else => unreachable, - }; + const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node_idx).?; if (struct_init.ast.type_expr != 0) try makeScopeInternal(allocator, context, struct_init.ast.type_expr); @@ -3060,13 +3044,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .array_init_one_comma, => { var buf: [2]Ast.Node.Index = 
undefined; - const array_init: Ast.full.ArrayInit = switch (node_tag) { - .array_init, .array_init_comma => tree.arrayInit(node_idx), - .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node_idx), - .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node_idx), - .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node_idx), - else => unreachable, - }; + const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buf, node_idx).?; if (array_init.ast.type_expr != 0) try makeScopeInternal(allocator, context, array_init.ast.type_expr); @@ -3078,7 +3056,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .container_field_align, .container_field_init, => { - const field = ast.containerField(tree, node_idx).?; + const field = tree.fullContainerField(node_idx).?; try makeScopeInternal(allocator, context, field.ast.type_expr); try makeScopeInternal(allocator, context, field.ast.align_expr); @@ -3101,7 +3079,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .ptr_type_bit_range, .ptr_type_sentinel, => { - const ptr_type: Ast.full.PtrType = ast.ptrType(tree, node_idx).?; + const ptr_type: Ast.full.PtrType = ast.fullPtrType(tree, node_idx).?; try makeScopeInternal(allocator, context, ptr_type.ast.sentinel); try makeScopeInternal(allocator, context, ptr_type.ast.align_node); @@ -3111,12 +3089,8 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .slice_open, .slice_sentinel, => { - const slice: Ast.full.Slice = switch (node_tag) { - .slice => tree.slice(node_idx), - .slice_open => tree.sliceOpen(node_idx), - .slice_sentinel => tree.sliceSentinel(node_idx), - else => unreachable, - }; + const slice: Ast.full.Slice = tree.fullSlice(node_idx).?; + try makeScopeInternal(allocator, context, slice.ast.sliced); try makeScopeInternal(allocator, context, slice.ast.start); try makeScopeInternal(allocator, context, slice.ast.end); diff --git a/src/ast.zig b/src/ast.zig index 5c7f5ce..cfbf935 100644 --- a/src/ast.zig +++ b/src/ast.zig @@ -7,7 +7,7 @@ const Ast = std.zig.Ast; const Node = Ast.Node; const full = Ast.full; -fn fullPtrType(tree: Ast, info: full.PtrType.Components) full.PtrType { +fn fullPtrTypeComponents(tree: Ast, info: full.PtrType.Components) full.PtrType { const token_tags = tree.tokens.items(.tag); const size: std.builtin.Type.Pointer.Size = switch (token_tags[info.main_token]) { .asterisk, @@ -57,7 +57,7 @@ pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType { std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.PtrType); - return fullPtrType(tree, .{ + return fullPtrTypeComponents(tree, .{ .main_token = tree.nodes.items(.main_token)[node], .align_node = extra.align_node, .addrspace_node = extra.addrspace_node, @@ -71,7 +71,7 @@ pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType { pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType { std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel); const data = tree.nodes.items(.data)[node]; - return fullPtrType(tree, .{ + return fullPtrTypeComponents(tree, .{ .main_token = tree.nodes.items(.main_token)[node], .align_node = 0, .addrspace_node = 0, @@ -85,7 +85,7 @@ pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType { pub fn ptrTypeAligned(tree: Ast, node: Node.Index) full.PtrType { std.debug.assert(tree.nodes.items(.tag)[node] == 
.ptr_type_aligned); const data = tree.nodes.items(.data)[node]; - return fullPtrType(tree, .{ + return fullPtrTypeComponents(tree, .{ .main_token = tree.nodes.items(.main_token)[node], .align_node = data.lhs, .addrspace_node = 0, @@ -100,7 +100,7 @@ pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType { std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange); - return fullPtrType(tree, .{ + return fullPtrTypeComponents(tree, .{ .main_token = tree.nodes.items(.main_token)[node], .align_node = extra.align_node, .addrspace_node = extra.addrspace_node, @@ -111,7 +111,7 @@ pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType { }); } -fn fullIf(tree: Ast, info: full.If.Components) full.If { +fn fullIfComponents(tree: Ast, info: full.If.Components) full.If { const token_tags = tree.tokens.items(.tag); var result: full.If = .{ .ast = info, @@ -137,27 +137,29 @@ fn fullIf(tree: Ast, info: full.If.Components) full.If { } pub fn ifFull(tree: Ast, node: Node.Index) full.If { + std.debug.assert(tree.nodes.items(.tag)[node] == .@"if"); const data = tree.nodes.items(.data)[node]; - if (tree.nodes.items(.tag)[node] == .@"if") { - const extra = tree.extraData(data.rhs, Node.If); - return fullIf(tree, .{ - .cond_expr = data.lhs, - .then_expr = extra.then_expr, - .else_expr = extra.else_expr, - .if_token = tree.nodes.items(.main_token)[node], - }); - } else { - std.debug.assert(tree.nodes.items(.tag)[node] == .if_simple); - return fullIf(tree, .{ - .cond_expr = data.lhs, - .then_expr = data.rhs, - .else_expr = 0, - .if_token = tree.nodes.items(.main_token)[node], - }); - } + const extra = tree.extraData(data.rhs, Node.If); + return fullIfComponents(tree, .{ + .cond_expr = data.lhs, + .then_expr = extra.then_expr, + .else_expr = extra.else_expr, + .if_token = tree.nodes.items(.main_token)[node], + }); } -fn fullWhile(tree: Ast, info: full.While.Components) full.While { +pub fn ifSimple(tree: Ast, node: Node.Index) full.If { + std.debug.assert(tree.nodes.items(.tag)[node] == .if_simple); + const data = tree.nodes.items(.data)[node]; + return fullIfComponents(tree, .{ + .cond_expr = data.lhs, + .then_expr = data.rhs, + .else_expr = 0, + .if_token = tree.nodes.items(.main_token)[node], + }); +} + +fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While { const token_tags = tree.tokens.items(.tag); var result: full.While = .{ .ast = info, @@ -194,7 +196,7 @@ fn fullWhile(tree: Ast, info: full.While.Components) full.While { pub fn whileSimple(tree: Ast, node: Node.Index) full.While { const data = tree.nodes.items(.data)[node]; - return fullWhile(tree, .{ + return fullWhileComponents(tree, .{ .while_token = tree.nodes.items(.main_token)[node], .cond_expr = data.lhs, .cont_expr = 0, @@ -206,7 +208,7 @@ pub fn whileSimple(tree: Ast, node: Node.Index) full.While { pub fn whileCont(tree: Ast, node: Node.Index) full.While { const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.WhileCont); - return fullWhile(tree, .{ + return fullWhileComponents(tree, .{ .while_token = tree.nodes.items(.main_token)[node], .cond_expr = data.lhs, .cont_expr = extra.cont_expr, @@ -218,7 +220,7 @@ pub fn whileCont(tree: Ast, node: Node.Index) full.While { pub fn whileFull(tree: Ast, node: Node.Index) full.While { const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.While); - return fullWhile(tree, .{ + return 
fullWhileComponents(tree, .{ .while_token = tree.nodes.items(.main_token)[node], .cond_expr = data.lhs, .cont_expr = extra.cont_expr, @@ -229,7 +231,7 @@ pub fn whileFull(tree: Ast, node: Node.Index) full.While { pub fn forSimple(tree: Ast, node: Node.Index) full.While { const data = tree.nodes.items(.data)[node]; - return fullWhile(tree, .{ + return fullWhileComponents(tree, .{ .while_token = tree.nodes.items(.main_token)[node], .cond_expr = data.lhs, .cont_expr = 0, @@ -241,7 +243,7 @@ pub fn forSimple(tree: Ast, node: Node.Index) full.While { pub fn forFull(tree: Ast, node: Node.Index) full.While { const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.If); - return fullWhile(tree, .{ + return fullWhileComponents(tree, .{ .while_token = tree.nodes.items(.main_token)[node], .cond_expr = data.lhs, .cont_expr = 0, @@ -250,6 +252,35 @@ pub fn forFull(tree: Ast, node: Node.Index) full.While { }); } +pub fn fullPtrType(tree: Ast, node: Node.Index) ?full.PtrType { + return switch (tree.nodes.items(.tag)[node]) { + .ptr_type_aligned => tree.ptrTypeAligned(node), + .ptr_type_sentinel => tree.ptrTypeSentinel(node), + .ptr_type => tree.ptrType(node), + .ptr_type_bit_range => tree.ptrTypeBitRange(node), + else => null, + }; +} + +pub fn fullIf(tree: Ast, node: Node.Index) ?full.If { + return switch (tree.nodes.items(.tag)[node]) { + .if_simple => tree.ifSimple(node), + .@"if" => tree.ifFull(node), + else => null, + }; +} + +pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While { + return switch (tree.nodes.items(.tag)[node]) { + .while_simple => tree.whileSimple(node), + .while_cont => tree.whileCont(node), + .@"while" => tree.whileFull(node), + .for_simple => tree.forSimple(node), + .@"for" => tree.forFull(node), + else => null, + }; +} + pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex { const TokenIndex = Ast.TokenIndex; const tags = tree.nodes.items(.tag); @@ -911,36 +942,6 @@ pub fn paramLastToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex { return param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr); } -pub fn containerField(tree: Ast, node: Ast.Node.Index) ?Ast.full.ContainerField { - return switch (tree.nodes.items(.tag)[node]) { - .container_field => tree.containerField(node), - .container_field_init => tree.containerFieldInit(node), - .container_field_align => tree.containerFieldAlign(node), - else => null, - }; -} - -pub fn ptrType(tree: Ast, node: Ast.Node.Index) ?Ast.full.PtrType { - return switch (tree.nodes.items(.tag)[node]) { - .ptr_type => ptrTypeSimple(tree, node), - .ptr_type_aligned => ptrTypeAligned(tree, node), - .ptr_type_bit_range => ptrTypeBitRange(tree, node), - .ptr_type_sentinel => ptrTypeSentinel(tree, node), - else => null, - }; -} - -pub fn whileAst(tree: Ast, node: Ast.Node.Index) ?Ast.full.While { - return switch (tree.nodes.items(.tag)[node]) { - .@"while" => whileFull(tree, node), - .while_simple => whileSimple(tree, node), - .while_cont => whileCont(tree, node), - .@"for" => forFull(tree, node), - .for_simple => forSimple(tree, node), - else => null, - }; -} - pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool { return switch (tree.nodes.items(.tag)[node]) { .container_decl, @@ -962,58 +963,6 @@ pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool { }; } -pub fn containerDecl(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) ?full.ContainerDecl { - return switch (tree.nodes.items(.tag)[node_idx]) { - .container_decl, .container_decl_trailing => 
tree.containerDecl(node_idx), - .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx), - .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx), - .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx), - .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx), - .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx), - else => null, - }; -} - -/// Returns the member indices of a given declaration container. -/// Asserts given `tag` is a container node -pub fn declMembers(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) []const Ast.Node.Index { - std.debug.assert(isContainer(tree, node_idx)); - return switch (tree.nodes.items(.tag)[node_idx]) { - .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members, - .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx).ast.members, - .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx).ast.members, - .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx).ast.members, - .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members, - .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx).ast.members, - .root => tree.rootDecls(), - .error_set_decl => &[_]Ast.Node.Index{}, - else => unreachable, - }; -} - -/// Returns an `ast.full.VarDecl` for a given node index. -/// Returns null if the tag doesn't match -pub fn varDecl(tree: Ast, node_idx: Ast.Node.Index) ?Ast.full.VarDecl { - return switch (tree.nodes.items(.tag)[node_idx]) { - .global_var_decl => tree.globalVarDecl(node_idx), - .local_var_decl => tree.localVarDecl(node_idx), - .aligned_var_decl => tree.alignedVarDecl(node_idx), - .simple_var_decl => tree.simpleVarDecl(node_idx), - else => null, - }; -} - -pub fn isPtrType(tree: Ast, node: Ast.Node.Index) bool { - return switch (tree.nodes.items(.tag)[node]) { - .ptr_type, - .ptr_type_aligned, - .ptr_type_bit_range, - .ptr_type_sentinel, - => true, - else => false, - }; -} - pub fn isBuiltinCall(tree: Ast, node: Ast.Node.Index) bool { return switch (tree.nodes.items(.tag)[node]) { .builtin_call, @@ -1051,45 +1000,6 @@ pub fn isBlock(tree: Ast, node: Ast.Node.Index) bool { }; } -pub fn fnProtoHasBody(tree: Ast, node: Ast.Node.Index) ?bool { - return switch (tree.nodes.items(.tag)[node]) { - .fn_proto, - .fn_proto_multi, - .fn_proto_one, - .fn_proto_simple, - => false, - .fn_decl => true, - else => null, - }; -} - -pub fn fnProto(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto { - return switch (tree.nodes.items(.tag)[node]) { - .fn_proto => tree.fnProto(node), - .fn_proto_multi => tree.fnProtoMulti(node), - .fn_proto_one => tree.fnProtoOne(buf, node), - .fn_proto_simple => tree.fnProtoSimple(buf, node), - .fn_decl => fnProto(tree, tree.nodes.items(.data)[node].lhs, buf), - else => null, - }; -} - -pub fn callFull(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.Call { - return switch (tree.nodes.items(.tag)[node]) { - .call, - .call_comma, - .async_call, - .async_call_comma, - => tree.callFull(node), - .call_one, - .call_one_comma, - .async_call_one, - .async_call_one_comma, - => tree.callOne(buf, node), - else => null, - }; -} - /// returns a list of parameters pub fn builtinCallParams(tree: Ast, node: Ast.Node.Index, buf: *[2]Ast.Node.Index) ?[]const 
Node.Index { const node_data = tree.nodes.items(.data); diff --git a/src/inlay_hints.zig b/src/inlay_hints.zig index b1351ee..ef3fc23 100644 --- a/src/inlay_hints.zig +++ b/src/inlay_hints.zig @@ -84,7 +84,7 @@ fn writeCallHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *Doc switch (decl.*) { .ast_node => |fn_node| { var buffer: [1]Ast.Node.Index = undefined; - if (ast.fnProto(decl_tree, fn_node, &buffer)) |fn_proto| { + if (decl_tree.fullFnProto(&buffer, fn_node)) |fn_proto| { var i: usize = 0; var it = fn_proto.iterate(&decl_tree); @@ -282,7 +282,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .async_call_comma, => { var params: [1]Ast.Node.Index = undefined; - const call = ast.callFull(tree, node, ¶ms).?; + const call = tree.fullCall(¶ms, node).?; try writeCallNodeHint(builder, arena, store, call); for (call.ast.params) |param| { @@ -351,7 +351,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .ptr_type, .ptr_type_bit_range, => { - const ptr_type: Ast.full.PtrType = ast.ptrType(tree, node).?; + const ptr_type: Ast.full.PtrType = ast.fullPtrType(tree, node).?; if (ptr_type.ast.sentinel != 0) { return try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.sentinel, range }); @@ -458,12 +458,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .slice, .slice_sentinel, => { - const slice: Ast.full.Slice = switch (tag) { - .slice => tree.slice(node), - .slice_open => tree.sliceOpen(node), - .slice_sentinel => tree.sliceSentinel(node), - else => unreachable, - }; + const slice: Ast.full.Slice = tree.fullSlice(node).?; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.sliced, range }); try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.start, range }); @@ -481,13 +476,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .array_init_comma, => { var buffer: [2]Ast.Node.Index = undefined; - const array_init: Ast.full.ArrayInit = switch (tag) { - .array_init, .array_init_comma => tree.arrayInit(node), - .array_init_one, .array_init_one_comma => tree.arrayInitOne(buffer[0..1], node), - .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node), - .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buffer, node), - else => unreachable, - }; + const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buffer, node).?; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, array_init.ast.type_expr, range }); for (array_init.ast.elements) |elem| { @@ -505,13 +494,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .struct_init_comma, => { var buffer: [2]Ast.Node.Index = undefined; - const struct_init: Ast.full.StructInit = switch (tag) { - .struct_init, .struct_init_comma => tree.structInit(node), - .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node), - .struct_init_one, .struct_init_one_comma => tree.structInitOne(buffer[0..1], node), - .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buffer, node), - else => unreachable, - }; + const struct_init: Ast.full.StructInit = tree.fullStructInit(&buffer, node).?; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, struct_init.ast.type_expr, range }); @@ -546,7 +529,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .switch_case_inline_one, .switch_case_inline, => { - const 
switch_case = if (tag == .switch_case or tag == .switch_case_inline) tree.switchCase(node) else tree.switchCaseOne(node); + const switch_case = tree.fullSwitchCase(node).?; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, switch_case.ast.target_expr, range }); }, @@ -557,7 +540,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .for_simple, .@"for", => { - const while_node = ast.whileAst(tree, node).?; + const while_node = ast.fullWhile(tree, node).?; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cond_expr, range }); try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cont_expr, range }); @@ -571,7 +554,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .if_simple, .@"if", => { - const if_node = ast.ifFull(tree, node); + const if_node = ast.fullIf(tree, node).?; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.cond_expr, range }); try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.then_expr, range }); try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.else_expr, range }); @@ -584,7 +567,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .fn_decl, => { var buffer: [1]Ast.Node.Index = undefined; - const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buffer).?; + const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buffer, node).?; var it = fn_proto.iterate(&tree); while (ast.nextFnParam(&it)) |param_decl| { @@ -617,7 +600,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .tagged_union_enum_tag_trailing, => { var buffer: [2]Ast.Node.Index = undefined; - const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buffer).?; + const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buffer, node).?; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, decl.ast.arg, range }); @@ -634,7 +617,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .container_field_align, .container_field, => { - const container_field = ast.containerField(tree, node).?; + const container_field = tree.fullContainerField(node).?; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.value_expr, range }); try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.align_expr, range }); @@ -666,11 +649,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: .asm_output, .asm_input, => { - const asm_node: Ast.full.Asm = switch (tag) { - .@"asm" => tree.asmFull(node), - .asm_simple => tree.asmSimple(node), - else => return, - }; + const asm_node: Ast.full.Asm = tree.fullAsm(node) orelse return; try callWriteNodeInlayHint(allocator, .{ builder, arena, store, asm_node.ast.template, range }); }, @@ -700,8 +679,7 @@ pub fn writeRangeInlayHint( .encoding = encoding, }; - var buf: [2]Ast.Node.Index = undefined; - for (ast.declMembers(handle.tree, 0, &buf)) |child| { + for (handle.tree.rootDecls()) |child| { if (!isNodeInRange(handle.tree, child, range)) continue; try writeNodeInlayHint(&builder, arena, store, child, range); } diff --git a/src/references.zig b/src/references.zig index 9bc24b0..07f9cca 100644 --- a/src/references.zig +++ b/src/references.zig @@ -118,18 +118,19 @@ fn symbolReferencesInternal( .tagged_union_enum_tag, .tagged_union_enum_tag_trailing, .root, - .error_set_decl, => { var 
buf: [2]Ast.Node.Index = undefined; - for (ast.declMembers(tree, node, &buf)) |member| + const container_decl = tree.fullContainerDecl(&buf, node).?; + for (container_decl.ast.members) |member| try symbolReferencesInternal(builder, member, handle, false); }, + .error_set_decl => {}, .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl, => { - const var_decl = ast.varDecl(tree, node).?; + const var_decl = tree.fullVarDecl(node).?; try symbolReferencesInternal(builder, var_decl.ast.type_node, handle, false); try symbolReferencesInternal(builder, var_decl.ast.init_node, handle, false); }, @@ -137,7 +138,7 @@ fn symbolReferencesInternal( .container_field_align, .container_field_init, => { - const field = ast.containerField(tree, node).?; + const field = tree.fullContainerField(node).?; try symbolReferencesInternal(builder, field.ast.type_expr, handle, false); try symbolReferencesInternal(builder, field.ast.value_expr, handle, false); }, @@ -152,7 +153,7 @@ fn symbolReferencesInternal( .fn_decl, => { var buf: [1]Ast.Node.Index = undefined; - const fn_proto = ast.fnProto(tree, node, &buf).?; + const fn_proto = tree.fullFnProto(&buf, node).?; var it = fn_proto.iterate(&tree); while (ast.nextFnParam(&it)) |param| { try symbolReferencesInternal(builder, param.type_expr, handle, false); @@ -179,16 +180,10 @@ fn symbolReferencesInternal( }, .switch_case_one, .switch_case_inline_one, - => { - const case_one = tree.switchCaseOne(node); - try symbolReferencesInternal(builder, case_one.ast.target_expr, handle, false); - for (case_one.ast.values) |val| - try symbolReferencesInternal(builder, val, handle, false); - }, .switch_case, .switch_case_inline, => { - const case = tree.switchCase(node); + const case = tree.fullSwitchCase(node).?; try symbolReferencesInternal(builder, case.ast.target_expr, handle, false); for (case.ast.values) |val| try symbolReferencesInternal(builder, val, handle, false); @@ -199,7 +194,7 @@ fn symbolReferencesInternal( .for_simple, .@"for", => { - const loop = ast.whileAst(tree, node).?; + const loop = ast.fullWhile(tree, node).?; try symbolReferencesInternal(builder, loop.ast.cond_expr, handle, false); try symbolReferencesInternal(builder, loop.ast.then_expr, handle, false); try symbolReferencesInternal(builder, loop.ast.cont_expr, handle, false); @@ -208,7 +203,7 @@ fn symbolReferencesInternal( .@"if", .if_simple, => { - const if_node = ast.ifFull(tree, node); + const if_node = ast.fullIf(tree, node).?; try symbolReferencesInternal(builder, if_node.ast.cond_expr, handle, false); try symbolReferencesInternal(builder, if_node.ast.then_expr, handle, false); try symbolReferencesInternal(builder, if_node.ast.else_expr, handle, false); @@ -218,7 +213,7 @@ fn symbolReferencesInternal( .ptr_type_bit_range, .ptr_type_sentinel, => { - const ptr_type = ast.ptrType(tree, node).?; + const ptr_type = ast.fullPtrType(tree, node).?; if (ptr_type.ast.align_node != 0) { try symbolReferencesInternal(builder, ptr_type.ast.align_node, handle, false); @@ -239,15 +234,10 @@ fn symbolReferencesInternal( .array_init_one_comma, .array_init_dot_two, .array_init_dot_two_comma, - => |tag| { + => { var buf: [2]Ast.Node.Index = undefined; - const array_init = switch (tag) { - .array_init, .array_init_comma => tree.arrayInit(node), - .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node), - .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node), - .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node), - else => unreachable, - }; + 
const array_init = tree.fullArrayInit(&buf, node).?; + try symbolReferencesInternal(builder, array_init.ast.type_expr, handle, false); for (array_init.ast.elements) |e| try symbolReferencesInternal(builder, e, handle, false); @@ -260,15 +250,10 @@ fn symbolReferencesInternal( .struct_init_dot_two_comma, .struct_init_one, .struct_init_one_comma, - => |tag| { + => { var buf: [2]Ast.Node.Index = undefined; - const struct_init: Ast.full.StructInit = switch (tag) { - .struct_init, .struct_init_comma => tree.structInit(node), - .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node), - .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node), - .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node), - else => unreachable, - }; + const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?; + try symbolReferencesInternal(builder, struct_init.ast.type_expr, handle, false); for (struct_init.ast.fields) |field| try symbolReferencesInternal(builder, field, handle, false); @@ -283,7 +268,7 @@ fn symbolReferencesInternal( .async_call_one_comma, => { var buf: [1]Ast.Node.Index = undefined; - const call = ast.callFull(tree, node, &buf).?; + const call = tree.fullCall(&buf, node).?; try symbolReferencesInternal(builder, call.ast.fn_expr, handle, false); @@ -294,13 +279,8 @@ fn symbolReferencesInternal( .slice, .slice_sentinel, .slice_open, - => |tag| { - const slice: Ast.full.Slice = switch (tag) { - .slice => tree.slice(node), - .slice_open => tree.sliceOpen(node), - .slice_sentinel => tree.sliceSentinel(node), - else => unreachable, - }; + => { + const slice: Ast.full.Slice = tree.fullSlice(node).?; try symbolReferencesInternal(builder, slice.ast.sliced, handle, false); try symbolReferencesInternal(builder, slice.ast.start, handle, false); @@ -520,7 +500,7 @@ pub fn symbolReferences( const proto = scope_data.function; var buf: [1]Ast.Node.Index = undefined; - const fn_proto = ast.fnProto(curr_handle.tree, proto, &buf).?; + const fn_proto = curr_handle.tree.fullFnProto(&buf, proto).?; var it = fn_proto.iterate(&curr_handle.tree); while (ast.nextFnParam(&it)) |candidate| { diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 788cd50..0484971 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -339,7 +339,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .simple_var_decl, .aligned_var_decl, => { - const var_decl = ast.varDecl(tree, node).?; + const var_decl = tree.fullVarDecl(node).?; if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx| try writeDocComments(builder, tree, comment_idx); @@ -386,7 +386,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .tagged_union_two_trailing, => { var buf: [2]Ast.Node.Index = undefined; - const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buf).?; + const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buf, node).?; try writeToken(builder, decl.layout_token, .keyword); try writeToken(builder, decl.ast.main_token, .keyword); @@ -446,7 +446,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .fn_decl, => { var buf: [1]Ast.Node.Index = undefined; - const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buf).?; + const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buf, node).?; if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs| try writeDocComments(builder, tree, docs); @@ -523,7 +523,7 @@ 
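// A minimal sketch of the pattern the surrounding hunks adopt: the hand-rolled
// `switch` on the node tag is replaced by the upstream `std.zig.Ast` accessors,
// which return an optional "full" view of the node and `null` for non-matching
// tags. This example is illustrative only (the function name is made up); it
// assumes nothing beyond the standard library.
const std = @import("std");
const Ast = std.zig.Ast;

/// Counts `else` prongs by asking `fullSwitchCase` for every node; nodes that
/// are not switch cases return `null` and are skipped.
fn countElseProngs(tree: Ast) usize {
    var count: usize = 0;
    var node: Ast.Node.Index = 0;
    while (node < tree.nodes.len) : (node += 1) {
        const case = tree.fullSwitchCase(node) orelse continue;
        if (case.ast.values.len == 0) count += 1; // an `else` prong carries no values
    }
    return count;
}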
fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .switch_case_inline_one, .switch_case_inline, => { - const switch_case = if (tag == .switch_case or tag == .switch_case_inline) tree.switchCase(node) else tree.switchCaseOne(node); + const switch_case = tree.fullSwitchCase(node).?; try writeToken(builder, switch_case.inline_token, .keyword); for (switch_case.ast.values) |item_node| try callWriteNodeTokens(allocator, .{ builder, item_node }); // check it it's 'else' @@ -541,7 +541,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .for_simple, .@"for", => { - const while_node = ast.whileAst(tree, node).?; + const while_node = ast.fullWhile(tree, node).?; try writeToken(builder, while_node.label_token, .label); try writeToken(builder, while_node.inline_token, .keyword); try writeToken(builder, while_node.ast.while_token, .keyword); @@ -575,7 +575,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .@"if", .if_simple, => { - const if_node = ast.ifFull(tree, node); + const if_node = ast.fullIf(tree, node).?; try writeToken(builder, if_node.ast.if_token, .keyword); try callWriteNodeTokens(allocator, .{ builder, if_node.ast.cond_expr }); @@ -609,13 +609,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .array_init_dot_two_comma, => { var buf: [2]Ast.Node.Index = undefined; - const array_init: Ast.full.ArrayInit = switch (tag) { - .array_init, .array_init_comma => tree.arrayInit(node), - .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node), - .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node), - .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node), - else => unreachable, - }; + const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buf, node).?; try callWriteNodeTokens(allocator, .{ builder, array_init.ast.type_expr }); for (array_init.ast.elements) |elem| try callWriteNodeTokens(allocator, .{ builder, elem }); @@ -630,13 +624,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .struct_init_dot_two_comma, => { var buf: [2]Ast.Node.Index = undefined; - const struct_init: Ast.full.StructInit = switch (tag) { - .struct_init, .struct_init_comma => tree.structInit(node), - .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node), - .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node), - .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node), - else => unreachable, - }; + const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?; var field_token_type: ?TokenType = null; @@ -674,7 +662,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .async_call_one_comma, => { var params: [1]Ast.Node.Index = undefined; - const call = ast.callFull(tree, node, ¶ms).?; + const call = tree.fullCall(¶ms, node).?; try writeToken(builder, call.async_token, .keyword); try callWriteNodeTokens(allocator, .{ builder, call.ast.fn_expr }); @@ -690,12 +678,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .slice_open, .slice_sentinel, => { - const slice: Ast.full.Slice = switch (tag) { - .slice => tree.slice(node), - .slice_open => tree.sliceOpen(node), - .slice_sentinel => tree.sliceSentinel(node), - else => unreachable, - }; + const slice: Ast.full.Slice = tree.fullSlice(node).?; try callWriteNodeTokens(allocator, .{ builder, 
slice.ast.sliced }); try callWriteNodeTokens(allocator, .{ builder, slice.ast.start }); @@ -772,11 +755,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .asm_input, .asm_simple, => { - const asm_node: Ast.full.Asm = switch (tag) { - .@"asm" => tree.asmFull(node), - .asm_simple => tree.asmSimple(node), - else => return, // TODO Inputs, outputs - }; + const asm_node: Ast.full.Asm = tree.fullAsm(node).?; try writeToken(builder, main_token, .keyword); try writeToken(builder, asm_node.volatile_token, .keyword); @@ -920,7 +899,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .ptr_type_bit_range, .ptr_type_sentinel, => { - const ptr_type = ast.ptrType(tree, node).?; + const ptr_type = ast.fullPtrType(tree, node).?; if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and main_token == main_tokens[ptr_type.ast.child_type]) @@ -955,10 +934,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr .array_type, .array_type_sentinel, => { - const array_type: Ast.full.ArrayType = if (tag == .array_type) - tree.arrayType(node) - else - tree.arrayTypeSentinel(node); + const array_type: Ast.full.ArrayType = tree.fullArrayType(node).?; try callWriteNodeTokens(allocator, .{ builder, array_type.ast.elem_count }); try callWriteNodeTokens(allocator, .{ builder, array_type.ast.sentinel }); @@ -988,7 +964,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type: ?TokenType) !void { const tree = builder.handle.tree; - const container_field = ast.containerField(tree, node).?; + const container_field = tree.fullContainerField(node).?; const base = tree.nodes.items(.main_token)[node]; const tokens = tree.tokens.items(.tag); @@ -1033,8 +1009,7 @@ pub fn writeAllSemanticTokens( var builder = Builder.init(arena, store, handle, encoding); // reverse the ast from the root declarations - var buf: [2]Ast.Node.Index = undefined; - for (ast.declMembers(handle.tree, 0, &buf)) |child| { + for (handle.tree.rootDecls()) |child| { writeNodeTokens(&builder, child) catch |err| switch (err) { error.MovedBackwards => break, else => |e| return e, diff --git a/src/signature_help.zig b/src/signature_help.zig index e47e72d..b14bf17 100644 --- a/src/signature_help.zig +++ b/src/signature_help.zig @@ -275,7 +275,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl }; var buf: [1]Ast.Node.Index = undefined; - if (ast.fnProto(type_handle.handle.tree, node, &buf)) |proto| { + if (type_handle.handle.tree.fullFnProto(&buf, node)) |proto| { return try fnProtoToSignatureInfo( document_store, arena, @@ -327,7 +327,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl } } - if (ast.fnProto(res_handle.tree, node, &buf)) |proto| { + if (res_handle.tree.fullFnProto(&buf, node)) |proto| { return try fnProtoToSignatureInfo( document_store, arena, diff --git a/src/translate_c.zig b/src/translate_c.zig index 6c9f864..cf3d2df 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -69,8 +69,8 @@ fn convertCIncludeInternal( var writer = output.writer(allocator); var buffer: [2]Ast.Node.Index = undefined; - if (ast.isBlock(tree, node)) { - for (ast.blockStatements(tree, node, &buffer).?) 
|statement| {
+    if (ast.blockStatements(tree, node, &buffer)) |statements| {
+        for (statements) |statement| {
             try callConvertCIncludeInternal(stack_allocator, .{ allocator, stack_allocator, tree, statement, output });
         }
     } else if (ast.builtinCallParams(tree, node, &buffer)) |params| {
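// A usage sketch for the ZLS-side dispatchers added in src/ast.zig earlier in
// this patch (`fullWhile`, `fullIf`, `fullPtrType`). It is illustrative only:
// the import path and the helper name are assumptions, not part of the diff.
const std = @import("std");
const Ast = std.zig.Ast;
const ast = @import("ast.zig"); // ZLS's own ast helpers (assumed import path)

/// Returns the condition expression of a `while`/`for` or `if` node, or `null`
/// for any other tag. Call sites no longer switch on the tag themselves; the
/// dispatcher returns `null` instead of asserting.
fn conditionExpr(tree: Ast, node: Ast.Node.Index) ?Ast.Node.Index {
    if (ast.fullWhile(tree, node)) |loop| return loop.ast.cond_expr;
    if (ast.fullIf(tree, node)) |if_node| return if_node.ast.cond_expr;
    return null;
}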