diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml
new file mode 100644
index 0000000..07eae8a
--- /dev/null
+++ b/.github/workflows/fuzz.yml
@@ -0,0 +1,115 @@
+name: Sus Fuzzing
+
+on:
+  # pull_request_target can be dangerous but is necessary here to access secrets.
+  # I'm pretty comfortable using it because:
+  # - We limit all permissions (including GITHUB_TOKEN) to read-only
+  # - We limit runs to labelled PRs only, which prevents random exploitation
+  # - We don't expose secrets in environment variables, which makes exploitation much more difficult
+  # - The secrets that we reference aren't all that important anyway (they can only access our DigitalOcean Space)
+ pull_request_target:
+ types: [labeled, synchronize]
+ push:
+ paths:
+ - "**.zig"
+ branches:
+ - master
+ schedule:
+ - cron: "0 0 * * *"
+ workflow_dispatch:
+ inputs:
+ fuzzing_duration:
+ type: string
+ description: How long should fuzzing last? (sleep time argument)
+ default: 15m
+
+permissions: read-all
+
+jobs:
+ fuzz:
+ if: github.repository_owner == 'zigtools' && (github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'pr:fuzz'))
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Set Swap Space
+ uses: pierotofy/set-swap-space@master
+ with:
+ swap-size-gb: 10
+
+ - name: Default fuzzing duration
+ if: github.event_name != 'pull_request_target'
+ run: |
+ echo "FUZZING_DURATION=${{ github.event.inputs.fuzzing_duration }}" >> $GITHUB_ENV
+
+ - name: PR fuzzing duration
+ if: github.event_name == 'pull_request_target'
+ run: |
+ echo "FUZZING_DURATION=15m" >> $GITHUB_ENV
+
+ - name: Grab zig
+ uses: goto-bus-stop/setup-zig@v1
+ with:
+ version: master
+
+ - run: zig version
+ - run: zig env
+
+ - name: Checkout zig
+ uses: actions/checkout@v3
+ with:
+ path: zig
+ repository: "ziglang/zig"
+ fetch-depth: 0
+
+ - name: Checkout zls
+ uses: actions/checkout@v3
+ with:
+ path: zls
+ fetch-depth: 0
+ submodules: true
+
+ - name: Build zls
+ run: |
+ cd $GITHUB_WORKSPACE/zls
+ pwd
+ zig build
+
+ - name: Checkout sus
+ uses: actions/checkout@v3
+ with:
+ path: sus
+ repository: "zigtools/sus"
+ fetch-depth: 0
+ submodules: recursive
+
+ - name: Build sus
+ run: |
+ cd $GITHUB_WORKSPACE/sus
+ pwd
+ zig build -Drelease-fast
+
+ - name: Run sus
+ continue-on-error: true
+ run: |
+ cd $GITHUB_WORKSPACE/sus
+ FUZZING_DURATION=${{ env.FUZZING_DURATION }}
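+        # Background watchdog: after the configured duration (falling back to 1h), forcefully kill the fuzzer.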
+ { sleep ${FUZZING_DURATION:-1h}; pkill -9 sus; } &
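+        # Run sus against the freshly built zls in its markov mode, using the Zig standard library as the input corpus.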
+ ./zig-out/bin/sus $GITHUB_WORKSPACE/zls/zig-out/bin/zls markov $GITHUB_WORKSPACE/zig/lib/std
+
+ - name: Upload saved logs
+ uses: actions/upload-artifact@v3
+ with:
+ name: saved-logs
+ path: sus/saved_logs/
+
+ - uses: BetaHuhn/do-spaces-action@v2
+ with:
+ access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
+ secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
+ space_name: fuzzing-output
+ space_region: nyc3
+ source: sus/saved_logs/
+ out_dir: ${{ github.event.pull_request.head.repo.full_name || github.repository }}/${{ github.head_ref || github.ref_name }}/${{ github.event.pull_request.head.sha || github.sha }}
+
+
+
\ No newline at end of file
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 2df721f..5ad4166 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -122,19 +122,3 @@ jobs:
REMOTE_HOST: ${{ secrets.WEBSITE_DEPLOY_HOST }}
REMOTE_USER: ${{ secrets.WEBSITE_DEPLOY_USER }}
TARGET: ${{ secrets.WEBSITE_DEPLOY_FOLDER }}
-
- - name: Instruct fuzzing server to pull latest zls
- if: ${{ matrix.os == 'ubuntu-latest' && github.ref == 'refs/heads/master' && github.repository_owner == 'zigtools' }}
- uses: appleboy/ssh-action@v0.1.7
- with:
- host: fuzzing.zigtools.org
- username: ${{ secrets.FUZZING_SSH_USERNAME }}
- key: ${{ secrets.FUZZING_SSH_PRIVKEY }}
- script: |
- systemctl stop fuzzing
- systemctl stop fuzzing-web
- cd /root/sus
- ./script/setup.sh
- systemctl start fuzzing
- sleep 5s
- systemctl start fuzzing-web
diff --git a/build.zig b/build.zig
index 5e9d480..9410ce4 100644
--- a/build.zig
+++ b/build.zig
@@ -7,7 +7,7 @@ const zls_version = std.builtin.Version{ .major = 0, .minor = 11, .patch = 0 };
pub fn build(b: *std.build.Builder) !void {
comptime {
const current_zig = builtin.zig_version;
- const min_zig = std.SemanticVersion.parse("0.11.0-dev.874+40ed6ae84") catch return; // Changes to builtin.Type API
+ const min_zig = std.SemanticVersion.parse("0.11.0-dev.1254+1f8f79cd5") catch return; // add helper functions to std.zig.Ast
if (current_zig.order(min_zig) == .lt) {
@compileError(std.fmt.comptimePrint("Your Zig version v{} does not meet the minimum build requirement of v{}", .{ current_zig, min_zig }));
}
diff --git a/src/ComptimeInterpreter.zig b/src/ComptimeInterpreter.zig
index 49cc24e..40f9f4e 100644
--- a/src/ComptimeInterpreter.zig
+++ b/src/ComptimeInterpreter.zig
@@ -235,9 +235,10 @@ pub fn interpret(
defer fields.deinit(interpreter.allocator);
var buffer: [2]Ast.Node.Index = undefined;
- const members = ast.declMembers(tree, node_idx, &buffer);
- for (members) |member| {
- const container_field = ast.containerField(tree, member) orelse {
+
+ const container_decl = tree.fullContainerDecl(&buffer, node_idx).?;
+ for (container_decl.ast.members) |member| {
+ const container_field = tree.fullContainerField(member) orelse {
_ = try interpreter.interpret(member, container_namespace, options);
continue;
};
@@ -304,7 +305,7 @@ pub fn interpret(
if (decls.contains(name))
return InterpretResult{ .nothing = {} };
- const decl = ast.varDecl(tree, node_idx).?;
+ const decl = tree.fullVarDecl(node_idx).?;
const type_value = if (decl.ast.type_node != 0) (try interpreter.interpret(decl.ast.type_node, namespace, .{})).maybeGetValue() else null;
const init_value = if (decl.ast.init_node != 0) (try interpreter.interpret(decl.ast.init_node, namespace, .{})).maybeGetValue() else null;
@@ -651,7 +652,7 @@ pub fn interpret(
.@"if",
.if_simple,
=> {
- const if_info = ast.ifFull(tree, node_idx);
+ const if_info = ast.fullIf(tree, node_idx).?;
// TODO: Don't evaluate runtime ifs
// if (options.observe_values) {
const ir = try interpreter.interpret(if_info.ast.cond_expr, namespace, options);
@@ -942,7 +943,7 @@ pub fn interpret(
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const func = ast.fnProto(tree, node_idx, &buf).?;
+ const func = tree.fullFnProto(&buf, node_idx).?;
// TODO: Resolve function type
@@ -1008,7 +1009,7 @@ pub fn interpret(
.async_call_one_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
-            const call_full = ast.callFull(tree, node_idx, &params).?;
+            const call_full = tree.fullCall(&params, node_idx) orelse unreachable;
var args = try std.ArrayListUnmanaged(Value).initCapacity(interpreter.allocator, call_full.ast.params.len);
defer args.deinit(interpreter.allocator);
@@ -1127,7 +1128,7 @@ pub fn call(
const tree = interpreter.getHandle().tree;
var buf: [1]Ast.Node.Index = undefined;
- var proto = ast.fnProto(tree, func_node_idx, &buf) orelse return error.CriticalAstFailure;
+ var proto = tree.fullFnProto(&buf, func_node_idx) orelse return error.CriticalAstFailure;
// TODO: Make argument namespace to evaluate arguments in
try interpreter.namespaces.append(interpreter.allocator, .{
diff --git a/src/Server.zig b/src/Server.zig
index d5a08b8..d5e5271 100644
--- a/src/Server.zig
+++ b/src/Server.zig
@@ -274,7 +274,7 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
.fn_decl,
=> blk: {
var buf: [1]Ast.Node.Index = undefined;
- const func = ast.fnProto(tree, decl_idx, &buf).?;
+ const func = tree.fullFnProto(&buf, decl_idx).?;
if (func.extern_export_inline_token != null) break :blk;
if (func.name_token) |name_token| {
@@ -666,7 +666,7 @@ fn nodeToCompletion(
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const func = ast.fnProto(tree, node, &buf).?;
+ const func = tree.fullFnProto(&buf, node).?;
if (func.name_token) |name_token| {
const use_snippets = server.config.enable_snippets and server.client_capabilities.supports_snippets;
const insert_text = if (use_snippets) blk: {
@@ -692,7 +692,7 @@ fn nodeToCompletion(
.aligned_var_decl,
.simple_var_decl,
=> {
- const var_decl = ast.varDecl(tree, node).?;
+ const var_decl = tree.fullVarDecl(node).?;
const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const;
if (try analysis.resolveVarDeclAlias(&server.document_store, server.arena, node_handle)) |result| {
@@ -717,7 +717,7 @@ fn nodeToCompletion(
.container_field_align,
.container_field_init,
=> {
- const field = ast.containerField(tree, node).?;
+ const field = tree.fullContainerField(node).?;
try list.append(allocator, .{
.label = handle.tree.tokenSlice(field.ast.main_token),
.kind = if (field.ast.tuple_like) .Enum else .Field,
@@ -743,7 +743,7 @@ fn nodeToCompletion(
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
- const ptr_type = ast.ptrType(tree, node).?;
+ const ptr_type = ast.fullPtrType(tree, node).?;
switch (ptr_type.size) {
.One, .C, .Many => if (server.config.operator_completions) {
@@ -814,12 +814,12 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []
if (pos_index + 1 >= handle.text.len) return "";
var start_idx = pos_index;
- while (start_idx > 0 and isSymbolChar(handle.text[start_idx - 1])) {
+ while (start_idx > 0 and analysis.isSymbolChar(handle.text[start_idx - 1])) {
start_idx -= 1;
}
var end_idx = pos_index;
- while (end_idx < handle.text.len and isSymbolChar(handle.text[end_idx])) {
+ while (end_idx < handle.text.len and analysis.isSymbolChar(handle.text[end_idx])) {
end_idx += 1;
}
@@ -827,10 +827,6 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []
return handle.text[start_idx..end_idx];
}
-fn isSymbolChar(char: u8) bool {
- return std.ascii.isAlphanumeric(char) or char == '_';
-}
-
fn gotoDefinitionSymbol(
server: *Server,
decl_handle: analysis.DeclWithHandle,
@@ -881,11 +877,11 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO
var buf: [1]Ast.Node.Index = undefined;
- if (ast.varDecl(tree, node)) |var_decl| {
+ if (tree.fullVarDecl(node)) |var_decl| {
break :def analysis.getVariableSignature(tree, var_decl);
- } else if (ast.fnProto(tree, node, &buf)) |fn_proto| {
+ } else if (tree.fullFnProto(&buf, node)) |fn_proto| {
break :def analysis.getFunctionSignature(tree, fn_proto);
- } else if (ast.containerField(tree, node)) |field| {
+ } else if (tree.fullContainerField(node)) |field| {
break :def analysis.getContainerFieldSignature(tree, field);
} else {
break :def analysis.nodeToString(tree, node) orelse return null;
@@ -1303,16 +1299,15 @@ fn completeBuiltin(server: *Server) error{OutOfMemory}!?[]types.CompletionItem {
});
}
- var completions = try allocator.alloc(types.CompletionItem, builtin_completions.items.len);
+ var completions = try builtin_completions.clone(allocator);
if (server.client_capabilities.label_details_support) {
- for (builtin_completions.items) |item, i| {
- completions[i] = item;
- try formatDetailledLabel(&completions[i], allocator);
+ for (completions.items) |*item| {
+ try formatDetailledLabel(item, allocator);
}
}
- return completions;
+ return completions.items;
}
fn completeGlobal(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}![]types.CompletionItem {
@@ -2102,7 +2097,7 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty
}
const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
- const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
+ const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, false);
const maybe_completions = switch (pos_context) {
.builtin => try server.completeBuiltin(),
@@ -2126,6 +2121,29 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty
const completions = maybe_completions orelse return null;
+ // The cursor is in the middle of a word or before a @, so we can replace
+ // the remaining identifier with the completion instead of just inserting.
+ // TODO Identify function call/struct init and replace the whole thing.
+ const lookahead_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+ if (server.client_capabilities.supports_apply_edits and pos_context.loc() != null and lookahead_context.loc() != null and pos_context.loc().?.end != lookahead_context.loc().?.end) {
+ var end = lookahead_context.loc().?.end;
+ while (end < handle.text.len and (std.ascii.isAlphanumeric(handle.text[end]) or handle.text[end] == '"')) {
+ end += 1;
+ }
+
+ const replaceLoc = offsets.Loc{ .start = lookahead_context.loc().?.start, .end = end };
+ const replaceRange = offsets.locToRange(handle.text, replaceLoc, server.offset_encoding);
+
+ for (completions) |*item| {
+ item.textEdit = .{
+ .TextEdit = .{
+ .newText = item.insertText orelse item.label,
+ .range = replaceRange,
+ },
+ };
+ }
+ }
+
// truncate completions
for (completions) |*item| {
if (item.detail) |det| {
@@ -2183,7 +2201,7 @@ fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, resol
if (request.position.character == 0) return null;
const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
- const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
+ const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
return switch (pos_context) {
.var_access => try server.gotoDefinitionGlobal(source_index, handle, resolve_alias),
@@ -2223,7 +2241,7 @@ fn hoverHandler(server: *Server, request: types.HoverParams) Error!?types.Hover
if (request.position.character == 0) return null;
const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
- const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
+ const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
const response = switch (pos_context) {
.builtin => try server.hoverDefinitionBuiltin(source_index, handle),
@@ -2369,7 +2387,7 @@ fn generalReferencesHandler(server: *Server, request: GeneralReferencesRequest)
if (request.position().character <= 0) return null;
const source_index = offsets.positionToIndex(handle.text, request.position(), server.offset_encoding);
- const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
+ const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
const decl = switch (pos_context) {
.var_access => try server.getSymbolGlobal(source_index, handle),
@@ -2486,6 +2504,9 @@ fn inlayHintHandler(server: *Server, request: types.InlayHintParams) Error!?[]ty
}
fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]types.CodeAction {
+ const tracy_zone = tracy.trace(@src());
+ defer tracy_zone.end();
+
const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
var builder = code_actions.Builder{
@@ -2514,6 +2535,9 @@ fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]
}
fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error!?[]types.FoldingRange {
+ const tracy_zone = tracy.trace(@src());
+ defer tracy_zone.end();
+
const Token = std.zig.Token;
const Node = Ast.Node;
const allocator = server.arena.allocator();
@@ -2521,37 +2545,26 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
const helper = struct {
const Inclusivity = enum { inclusive, exclusive };
- /// Returns true if added.
- fn maybeAddTokRange(
- p_ranges: *std.ArrayList(types.FoldingRange),
- tree: Ast,
- start: Ast.TokenIndex,
- end: Ast.TokenIndex,
- end_reach: Inclusivity,
- encoding: offsets.Encoding,
- ) std.mem.Allocator.Error!bool {
- const can_add = start < end and !tree.tokensOnSameLine(start, end);
- if (can_add) {
- try addTokRange(p_ranges, tree, start, end, end_reach, encoding);
- }
- return can_add;
- }
+
fn addTokRange(
p_ranges: *std.ArrayList(types.FoldingRange),
tree: Ast,
start: Ast.TokenIndex,
end: Ast.TokenIndex,
end_reach: Inclusivity,
- encoding: offsets.Encoding,
) std.mem.Allocator.Error!void {
- std.debug.assert(!std.debug.runtime_safety or !tree.tokensOnSameLine(start, end));
+ if (tree.tokensOnSameLine(start, end)) return;
+ std.debug.assert(start <= end);
- const start_line = offsets.tokenToPosition(tree, start, encoding).line;
- const end_line = offsets.tokenToPosition(tree, end, encoding).line;
+ const start_index = offsets.tokenToIndex(tree, start);
+ const end_index = offsets.tokenToIndex(tree, end);
+
+ const start_line = std.mem.count(u8, tree.source[0..start_index], "\n");
+ const end_line = start_line + std.mem.count(u8, tree.source[start_index..end_index], "\n");
try p_ranges.append(.{
- .startLine = start_line,
- .endLine = end_line - @boolToInt(end_reach == .exclusive),
+ .startLine = @intCast(u32, start_line),
+ .endLine = @intCast(u32, end_line) - @boolToInt(end_reach == .exclusive),
});
}
};
@@ -2582,21 +2595,24 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
const node = @intCast(Node.Index, i);
switch (node_tag) {
+ .root => continue,
// only fold the expression pertaining to the if statement, and the else statement, each respectively.
// TODO: Should folding multiline condition expressions also be supported? Ditto for the other control flow structures.
- .@"if", .if_simple => {
- const if_full = ast.ifFull(handle.tree, node);
+ .@"if",
+ .if_simple,
+ => {
+ const if_full = ast.fullIf(handle.tree, node).?;
const start_tok_1 = ast.lastToken(handle.tree, if_full.ast.cond_expr);
const end_tok_1 = ast.lastToken(handle.tree, if_full.ast.then_expr);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive);
if (if_full.ast.else_expr == 0) continue;
const start_tok_2 = if_full.else_token;
const end_tok_2 = ast.lastToken(handle.tree, if_full.ast.else_expr);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive);
},
// same as if/else
@@ -2606,17 +2622,17 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
.while_cont,
.while_simple,
=> {
- const loop_full = ast.whileAst(handle.tree, node).?;
+ const loop_full = ast.fullWhile(handle.tree, node).?;
const start_tok_1 = ast.lastToken(handle.tree, loop_full.ast.cond_expr);
const end_tok_1 = ast.lastToken(handle.tree, loop_full.ast.then_expr);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive);
if (loop_full.ast.else_expr == 0) continue;
const start_tok_2 = loop_full.else_token;
const end_tok_2 = ast.lastToken(handle.tree, loop_full.ast.else_expr);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive);
},
.global_var_decl,
@@ -2644,22 +2660,19 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
start_doc_tok -= 1;
}
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive);
}
// Function prototype folding regions
- var fn_proto_buffer: [1]Node.Index = undefined;
- const fn_proto = ast.fnProto(handle.tree, node, fn_proto_buffer[0..]) orelse
+ var buffer: [1]Node.Index = undefined;
+ const fn_proto = handle.tree.fullFnProto(&buffer, node) orelse
break :decl_node_blk;
const list_start_tok: Ast.TokenIndex = fn_proto.lparen;
const list_end_tok: Ast.TokenIndex = ast.lastToken(handle.tree, fn_proto.ast.proto_node);
if (handle.tree.tokensOnSameLine(list_start_tok, list_end_tok)) break :decl_node_blk;
- try ranges.ensureUnusedCapacity(1 + fn_proto.ast.params.len); // best guess, doesn't include anytype params
- helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive, server.offset_encoding) catch |err| switch (err) {
- error.OutOfMemory => unreachable,
- };
+ try helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive);
var it = fn_proto.iterate(&handle.tree);
while (ast.nextFnParam(&it)) |param| {
@@ -2669,7 +2682,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
while (token_tags[doc_end_tok + 1] == .doc_comment)
doc_end_tok += 1;
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive);
}
},
@@ -2681,7 +2694,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
=> {
const start_tok = handle.tree.firstToken(node);
const end_tok = ast.lastToken(handle.tree, node);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive);
},
// most other trivial cases can go through here.
@@ -2728,7 +2741,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
const start_tok = handle.tree.firstToken(node);
const end_tok = ast.lastToken(handle.tree, node);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive);
},
}
}
@@ -2777,6 +2790,9 @@ pub const SelectionRange = struct {
};
fn selectionRangeHandler(server: *Server, request: types.SelectionRangeParams) Error!?[]*SelectionRange {
+ const tracy_zone = tracy.trace(@src());
+ defer tracy_zone.end();
+
const allocator = server.arena.allocator();
const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
diff --git a/src/analysis.zig b/src/analysis.zig
index 185342e..8a6868e 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -194,7 +194,6 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
if (param.type_expr == 0) return false;
const token_starts = tree.tokens.items(.start);
- const token_data = tree.nodes.items(.data);
const in_container = innermostContainer(handle, token_starts[func.ast.fn_token]);
if (try resolveTypeOfNode(document_store, arena, .{
@@ -205,9 +204,9 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
return true;
}
- if (ast.isPtrType(tree, param.type_expr)) {
+ if (ast.fullPtrType(tree, param.type_expr)) |ptr_type| {
if (try resolveTypeOfNode(document_store, arena, .{
- .node = token_data[param.type_expr].rhs,
+ .node = ptr_type.ast.child_type,
.handle = handle,
})) |resolved_prefix_op| {
if (std.meta.eql(in_container, resolved_prefix_op))
@@ -280,7 +279,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
.global_var_decl,
.simple_var_decl,
.aligned_var_decl,
- => ast.varDecl(tree, node).?.ast.mut_token + 1,
+ => tree.fullVarDecl(node).?.ast.mut_token + 1,
// function declaration names
.fn_proto,
.fn_proto_multi,
@@ -289,7 +288,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
.fn_decl,
=> blk: {
var params: [1]Ast.Node.Index = undefined;
-            break :blk ast.fnProto(tree, node, &params).?.name_token;
+            break :blk tree.fullFnProto(&params, node).?.name_token;
},
// containers
@@ -297,7 +296,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
.container_field_init,
.container_field_align,
=> {
- const field = ast.containerField(tree, node).?.ast;
+ const field = tree.fullContainerField(node).?.ast;
return field.main_token;
},
@@ -381,7 +380,7 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato
const token_tags = tree.tokens.items(.tag);
const node_tags = tree.nodes.items(.tag);
- if (ast.varDecl(handle.tree, decl)) |var_decl| {
+ if (handle.tree.fullVarDecl(decl)) |var_decl| {
if (var_decl.ast.init_node == 0) return null;
const base_exp = var_decl.ast.init_node;
if (token_tags[var_decl.ast.mut_token] != .keyword_const) return null;
@@ -411,8 +410,9 @@ fn findReturnStatementInternal(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.N
if (node_tags[child_idx] == .@"return") {
if (datas[child_idx].lhs != 0) {
const lhs = datas[child_idx].lhs;
- if (ast.isCall(tree, lhs)) {
- const call_name = getDeclName(tree, datas[lhs].lhs);
+ var buf: [1]Ast.Node.Index = undefined;
+ if (tree.fullCall(&buf, lhs)) |call| {
+ const call_name = getDeclName(tree, call.ast.fn_expr);
if (call_name) |name| {
if (std.mem.eql(u8, name, tree.tokenSlice(fn_decl.name_token.?))) {
continue;
@@ -528,8 +528,7 @@ fn resolveDerefType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, dere
const main_token = tree.nodes.items(.main_token)[deref_node];
const token_tag = tree.tokens.items(.tag)[main_token];
- if (ast.isPtrType(tree, deref_node)) {
- const ptr_type = ast.ptrType(tree, deref_node).?;
+ if (ast.fullPtrType(tree, deref_node)) |ptr_type| {
switch (token_tag) {
.asterisk => {
return ((try resolveTypeOfNodeInternal(store, arena, .{
@@ -566,7 +565,7 @@ fn resolveBracketAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocat
.type = .{ .data = .{ .slice = data.rhs }, .is_type_val = false },
.handle = lhs.handle,
};
- } else if (ast.ptrType(tree, lhs_node)) |ptr_type| {
+ } else if (ast.fullPtrType(tree, lhs_node)) |ptr_type| {
if (ptr_type.size == .Slice) {
if (rhs == .Single) {
return ((try resolveTypeOfNodeInternal(store, arena, .{
@@ -647,7 +646,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
.simple_var_decl,
.aligned_var_decl,
=> {
- const var_decl = ast.varDecl(tree, node).?;
+ const var_decl = tree.fullVarDecl(node).?;
if (var_decl.ast.type_node != 0) {
const decl_type = .{ .node = var_decl.ast.type_node, .handle = handle };
if (try resolveTypeOfNodeInternal(store, arena, decl_type, bound_type_params)) |typ|
@@ -679,7 +678,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
switch (child.decl.*) {
.ast_node => |n| {
if (n == node) return null;
- if (ast.varDecl(child.handle.tree, n)) |var_decl| {
+ if (child.handle.tree.fullVarDecl(n)) |var_decl| {
if (var_decl.ast.init_node == node)
return null;
}
@@ -700,7 +699,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
.async_call_one_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
-            const call = ast.callFull(tree, node, &params) orelse unreachable;
+            const call = tree.fullCall(&params, node) orelse unreachable;
const callee = .{ .node = call.ast.fn_expr, .handle = handle };
const decl = (try resolveTypeOfNodeInternal(store, arena, callee, bound_type_params)) orelse
@@ -712,7 +711,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
else => return null,
};
var buf: [1]Ast.Node.Index = undefined;
- const func_maybe = ast.fnProto(decl.handle.tree, decl_node, &buf);
+ const func_maybe = decl.handle.tree.fullFnProto(&buf, decl_node);
if (func_maybe) |fn_decl| {
var expected_params = fn_decl.ast.params.len;
@@ -981,11 +980,11 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
};
const new_handle = store.getOrLoadHandle(builtin_uri) orelse return null;
- const root_scope = new_handle.document_scope.scopes.items[0];
- const decl = root_scope.decls.get("Type") orelse return null;
+ const root_scope_decls = new_handle.document_scope.scopes.items(.decls)[0];
+ const decl = root_scope_decls.get("Type") orelse return null;
if (decl != .ast_node) return null;
- const var_decl = ast.varDecl(new_handle.tree, decl.ast_node) orelse return null;
+ const var_decl = new_handle.tree.fullVarDecl(decl.ast_node) orelse return null;
return TypeWithHandle{
.type = .{
@@ -1025,7 +1024,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
=> {
var buf: [1]Ast.Node.Index = undefined;
// This is a function type
- if (ast.fnProto(tree, node, &buf).?.name_token == null) {
+ if (tree.fullFnProto(&buf, node).?.name_token == null) {
return TypeWithHandle.typeVal(node_handle);
}
@@ -1113,11 +1112,10 @@ pub const TypeWithHandle = struct {
const tree = self.handle.tree;
const node = self.type.data.other;
const tags = tree.nodes.items(.tag);
- if (ast.isContainer(tree, node)) {
- var buf: [2]Ast.Node.Index = undefined;
- for (ast.declMembers(tree, node, &buf)) |child| {
- if (tags[child].isContainerField()) return false;
- }
+ var buf: [2]Ast.Node.Index = undefined;
+ const full = tree.fullContainerDecl(&buf, node) orelse return true;
+ for (full.ast.members) |member| {
+ if (tags[member].isContainerField()) return false;
}
return true;
}
@@ -1138,7 +1136,7 @@ pub const TypeWithHandle = struct {
var buf: [1]Ast.Node.Index = undefined;
const tree = self.handle.tree;
return switch (self.type.data) {
- .other => |n| if (ast.fnProto(tree, n, &buf)) |fn_proto| blk: {
+ .other => |n| if (tree.fullFnProto(&buf, n)) |fn_proto| blk: {
break :blk isTypeFunction(tree, fn_proto);
} else false,
else => false,
@@ -1149,7 +1147,7 @@ pub const TypeWithHandle = struct {
var buf: [1]Ast.Node.Index = undefined;
const tree = self.handle.tree;
return switch (self.type.data) {
- .other => |n| if (ast.fnProto(tree, n, &buf)) |fn_proto| blk: {
+ .other => |n| if (tree.fullFnProto(&buf, n)) |fn_proto| blk: {
break :blk isGenericFunction(tree, fn_proto);
} else false,
else => false,
@@ -1349,7 +1347,7 @@ pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator
if (current_type.?.type.is_type_val) return null;
const cur_tree = current_type.?.handle.tree;
var buf: [1]Ast.Node.Index = undefined;
- if (ast.fnProto(cur_tree, current_type_node, &buf)) |func| {
+ if (cur_tree.fullFnProto(&buf, current_type_node)) |func| {
// Check if the function has a body and if so, pass it
// so the type can be resolved if it's a generic function returning
// an anonymous struct
@@ -1414,13 +1412,13 @@ pub fn isNodePublic(tree: Ast, node: Ast.Node.Index) bool {
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
- => ast.varDecl(tree, node).?.visib_token != null,
+ => tree.fullVarDecl(node).?.visib_token != null,
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
- => ast.fnProto(tree, node, &buf).?.visib_token != null,
+ => tree.fullFnProto(&buf, node).?.visib_token != null,
else => true,
};
}
@@ -1434,7 +1432,7 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
.container_field_init,
.container_field_align,
=> {
- const field = ast.containerField(tree, node).?.ast;
+ const field = tree.fullContainerField(node).?.ast;
return if (field.tuple_like) null else tree.tokenSlice(field.main_token);
},
.error_value => tree.tokenSlice(data[node].rhs),
@@ -1444,7 +1442,7 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
- => if (ast.fnProto(tree, node, &buf).?.name_token) |name| tree.tokenSlice(name) else null,
+ => if (tree.fullFnProto(&buf, node).?.name_token) |name| tree.tokenSlice(name) else null,
.field_access => tree.tokenSlice(data[node].rhs),
.call,
.call_comma,
@@ -1471,16 +1469,16 @@ fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize)
pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]const u8 {
const node_tags = tree.nodes.items(.tag);
+
var buf: [2]Ast.Node.Index = undefined;
- if (ast.isContainer(tree, node)) {
- const decls = ast.declMembers(tree, node, &buf);
- for (decls) |decl_idx| {
+ if (tree.fullContainerDecl(&buf, node)) |container_decl| {
+ for (container_decl.ast.members) |decl_idx| {
if (getImportStr(tree, decl_idx, source_index)) |name| {
return name;
}
}
return null;
- } else if (ast.varDecl(tree, node)) |var_decl| {
+ } else if (tree.fullVarDecl(node)) |var_decl| {
return getImportStr(tree, var_decl.ast.init_node, source_index);
} else if (node_tags[node] == .@"usingnamespace") {
return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index);
@@ -1559,9 +1557,32 @@ fn tokenLocAppend(prev: offsets.Loc, token: std.zig.Token) offsets.Loc {
};
}
-pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_index: usize) !PositionContext {
- const line_loc = offsets.lineLocUntilIndex(text, doc_index);
+pub fn isSymbolChar(char: u8) bool {
+ return std.ascii.isAlphanumeric(char) or char == '_';
+}
+
+/// Given a byte index in a document (typically cursor offset), classify what kind of entity is at that index.
+///
+/// Classification is based on the lexical structure -- we fetch the line containing the index, tokenize it,
+/// and look at the sequence of tokens just before the cursor. Thanks to the way Zig is designed (only line
+/// comments, etc.), lexing just a single line is always correct.
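+///
+/// Rough illustration (cursor shown as `|`; the exact variants are defined by `PositionContext`):
+///   `const x = @intC|`   -> builtin
+///   `const x = some_na|` -> var access
+///   `// a comm|ent`      -> comment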
+pub fn getPositionContext(
+ allocator: std.mem.Allocator,
+ text: []const u8,
+ doc_index: usize,
+ /// Should we look to the end of the current context? Yes for goto def, no for completions
+ lookahead: bool,
+) !PositionContext {
+ var new_index = doc_index;
+ if (lookahead and new_index < text.len and isSymbolChar(text[new_index])) {
+ new_index += 1;
+ } else if (lookahead and new_index + 1 < text.len and text[new_index] == '@') {
+ new_index += 2;
+ }
+
+ const line_loc = if (!lookahead) offsets.lineLocAtIndex(text, new_index) else offsets.lineLocUntilIndex(text, new_index);
const line = offsets.locToSlice(text, line_loc);
+ const prev_char = if (new_index > 0) text[new_index - 1] else 0;
const is_comment = std.mem.startsWith(u8, std.mem.trimLeft(u8, line, " \t"), "//");
if (is_comment) return .comment;
@@ -1582,10 +1603,16 @@ pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_in
while (true) {
const tok = tokenizer.next();
// Early exits.
+ if (tok.loc.start > new_index) break;
+ if (tok.loc.start == new_index) {
+            // Tie-breaking: the cursor is exactly between two tokens, and
+ // `tok` is the latter of the two.
+ if (tok.tag != .identifier) break;
+ }
switch (tok.tag) {
.invalid => {
// Single '@' do not return a builtin token so we check this on our own.
- if (line[line.len - 1] == '@') {
+ if (prev_char == '@') {
return PositionContext{
.builtin = .{
.start = line_loc.end - 1,
@@ -1691,7 +1718,7 @@ pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_in
.label => |filled| {
// We need to check this because the state could be a filled
// label if only a space follows it
- if (!filled or line[line.len - 1] != ' ') {
+ if (!filled or prev_char != ' ') {
return state.ctx;
}
},
@@ -1845,7 +1872,8 @@ fn addOutlineNodes(allocator: std.mem.Allocator, tree: Ast, child: Ast.Node.Inde
.tagged_union_two_trailing,
=> {
var buf: [2]Ast.Node.Index = undefined;
- for (ast.declMembers(tree, child, &buf)) |member|
+ const members = tree.fullContainerDecl(&buf, child).?.ast.members;
+ for (members) |member|
try addOutlineNodes(allocator, tree, member, context);
return;
},
@@ -1904,20 +1932,18 @@ fn getDocumentSymbolsInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast
.encoding = context.encoding,
};
- if (ast.isContainer(tree, node)) {
- var buf: [2]Ast.Node.Index = undefined;
- for (ast.declMembers(tree, node, &buf)) |child|
+ var buf: [2]Ast.Node.Index = undefined;
+ if (tree.fullContainerDecl(&buf, node)) |container_decl| {
+ for (container_decl.ast.members) |child| {
try addOutlineNodes(allocator, tree, child, &child_context);
- }
-
- if (ast.varDecl(tree, node)) |var_decl| {
+ }
+ } else if (tree.fullVarDecl(node)) |var_decl| {
if (var_decl.ast.init_node != 0)
try addOutlineNodes(allocator, tree, var_decl.ast.init_node, &child_context);
- }
- if (tags[node] == .fn_decl) fn_ch: {
+ } else if (tags[node] == .fn_decl) fn_ch: {
const fn_decl = tree.nodes.items(.data)[node];
var params: [1]Ast.Node.Index = undefined;
-        const fn_proto = ast.fnProto(tree, fn_decl.lhs, &params) orelse break :fn_ch;
+        const fn_proto = tree.fullFnProto(&params, fn_decl.lhs).?;
if (!isTypeFunction(tree, fn_proto)) break :fn_ch;
const ret_stmt = findReturnStatement(tree, fn_proto, fn_decl.rhs) orelse break :fn_ch;
const type_decl = tree.nodes.items(.data)[ret_stmt].lhs;
@@ -2066,26 +2092,27 @@ pub const DeclWithHandle = struct {
if (!switch_expr_type.isUnionType())
return null;
- if (node_tags[pay.items[0]] == .enum_literal) {
- const scope = findContainerScope(.{ .node = switch_expr_type.type.data.other, .handle = switch_expr_type.handle }) orelse return null;
- if (scope.decls.getEntry(tree.tokenSlice(main_tokens[pay.items[0]]))) |candidate| {
- switch (candidate.value_ptr.*) {
- .ast_node => |node| {
- if (ast.containerField(switch_expr_type.handle.tree, node)) |container_field| {
- if (container_field.ast.type_expr != 0) {
- return ((try resolveTypeOfNodeInternal(
- store,
- arena,
- .{ .node = container_field.ast.type_expr, .handle = switch_expr_type.handle },
- bound_type_params,
- )) orelse return null).instanceTypeVal();
- }
- }
- },
- else => {},
+ if (node_tags[pay.items[0]] != .enum_literal) return null;
+
+ const scope_index = findContainerScopeIndex(.{ .node = switch_expr_type.type.data.other, .handle = switch_expr_type.handle }) orelse return null;
+ const scope_decls = switch_expr_type.handle.document_scope.scopes.items(.decls);
+
+ const candidate = scope_decls[scope_index].getEntry(tree.tokenSlice(main_tokens[pay.items[0]])) orelse return null;
+
+ switch (candidate.value_ptr.*) {
+ .ast_node => |node| {
+ if (switch_expr_type.handle.tree.fullContainerField(node)) |container_field| {
+ if (container_field.ast.type_expr != 0) {
+ return ((try resolveTypeOfNodeInternal(
+ store,
+ arena,
+ .{ .node = container_field.ast.type_expr, .handle = switch_expr_type.handle },
+ bound_type_params,
+ )) orelse return null).instanceTypeVal();
+ }
}
- return null;
- }
+ },
+ else => {},
}
return null;
},
@@ -2094,17 +2121,16 @@ pub const DeclWithHandle = struct {
}
};
-fn findContainerScope(container_handle: NodeWithHandle) ?*Scope {
+fn findContainerScopeIndex(container_handle: NodeWithHandle) ?usize {
const container = container_handle.node;
const handle = container_handle.handle;
if (!ast.isContainer(handle.tree, container)) return null;
- // Find the container scope.
- return for (handle.document_scope.scopes.items) |*scope| {
- switch (scope.data) {
+ return for (handle.document_scope.scopes.items(.data)) |data, scope_index| {
+ switch (data) {
.container => |node| if (node == container) {
- break scope;
+ break scope_index;
},
else => {},
}
@@ -2122,9 +2148,11 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena
const is_enum = token_tags[main_token] == .keyword_enum;
- const container_scope = findContainerScope(container_handle) orelse return;
+ const scope_decls = handle.document_scope.scopes.items(.decls);
+ const scope_uses = handle.document_scope.scopes.items(.uses);
+ const container_scope_index = findContainerScopeIndex(container_handle) orelse return;
- var decl_it = container_scope.decls.iterator();
+ var decl_it = scope_decls[container_scope_index].iterator();
while (decl_it.next()) |entry| {
switch (entry.value_ptr.*) {
.ast_node => |node| {
@@ -2148,7 +2176,7 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena
try callback(context, decl);
}
- for (container_scope.uses.items) |use| {
+ for (scope_uses[container_scope_index].items) |use| {
const use_token = tree.nodes.items(.main_token)[use];
const is_pub = use_token > 0 and token_tags[use_token - 1] == .keyword_pub;
if (handle != orig_handle and !is_pub) continue;
@@ -2178,65 +2206,95 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena
}
}
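+
+/// Iterates the indices of every scope whose source range contains `source_index`.
+/// This relies on scopes being stored in ascending start-offset order, which is what lets
+/// `next` stop early once it reaches a scope that starts past the index.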
+pub const EnclosingScopeIterator = struct {
+ scope_locs: []offsets.Loc,
+ current_scope: usize,
+ source_index: usize,
+
+ pub fn next(self: *EnclosingScopeIterator) ?usize {
+ while (self.current_scope < self.scope_locs.len) : (self.current_scope += 1) {
+ const scope_loc = self.scope_locs[self.current_scope];
+
+ if (self.source_index >= scope_loc.start and self.source_index <= scope_loc.end) {
+ defer self.current_scope += 1;
+ return self.current_scope;
+ }
+ if (scope_loc.start >= self.source_index) {
+ self.current_scope = self.scope_locs.len;
+ return null;
+ }
+ }
+ return null;
+ }
+};
+
+pub fn iterateEnclosingScopes(document_scope: DocumentScope, source_index: usize) EnclosingScopeIterator {
+ return .{
+ .scope_locs = document_scope.scopes.items(.loc),
+ .current_scope = 0,
+ .source_index = source_index,
+ };
+}
+
pub fn iterateSymbolsContainer(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *const DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool) error{OutOfMemory}!void {
var use_trail = std.ArrayList(Ast.Node.Index).init(arena.allocator());
return try iterateSymbolsContainerInternal(store, arena, container_handle, orig_handle, callback, context, instance_access, &use_trail);
}
pub fn iterateLabels(handle: *const DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void {
- for (handle.document_scope.scopes.items) |scope| {
- if (source_index >= scope.loc.start and source_index < scope.loc.end) {
- var decl_it = scope.decls.iterator();
- while (decl_it.next()) |entry| {
- switch (entry.value_ptr.*) {
- .label_decl => {},
- else => continue,
- }
- try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle });
+ const scope_decls = handle.document_scope.scopes.items(.decls);
+
+ var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index);
+ while (scope_iterator.next()) |scope_index| {
+ var decl_it = scope_decls[scope_index].iterator();
+ while (decl_it.next()) |entry| {
+ switch (entry.value_ptr.*) {
+ .label_decl => {},
+ else => continue,
}
+ try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle });
}
- if (scope.loc.start >= source_index) return;
}
}
fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype, use_trail: *std.ArrayList(Ast.Node.Index)) error{OutOfMemory}!void {
- for (handle.document_scope.scopes.items) |scope| {
- if (source_index >= scope.loc.start and source_index <= scope.loc.end) {
- var decl_it = scope.decls.iterator();
- while (decl_it.next()) |entry| {
- if (entry.value_ptr.* == .ast_node and
- handle.tree.nodes.items(.tag)[entry.value_ptr.*.ast_node].isContainerField()) continue;
- if (entry.value_ptr.* == .label_decl) continue;
- try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle });
- }
+ const scope_decls = handle.document_scope.scopes.items(.decls);
+ const scope_uses = handle.document_scope.scopes.items(.uses);
- for (scope.uses.items) |use| {
- if (std.mem.indexOfScalar(Ast.Node.Index, use_trail.items, use) != null) continue;
- try use_trail.append(use);
-
- const use_expr = (try resolveTypeOfNode(
- store,
- arena,
- .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle },
- )) orelse continue;
- const use_expr_node = switch (use_expr.type.data) {
- .other => |n| n,
- else => continue,
- };
- try iterateSymbolsContainerInternal(
- store,
- arena,
- .{ .node = use_expr_node, .handle = use_expr.handle },
- handle,
- callback,
- context,
- false,
- use_trail,
- );
- }
+ var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index);
+ while (scope_iterator.next()) |scope_index| {
+ var decl_it = scope_decls[scope_index].iterator();
+ while (decl_it.next()) |entry| {
+ if (entry.value_ptr.* == .ast_node and
+ handle.tree.nodes.items(.tag)[entry.value_ptr.*.ast_node].isContainerField()) continue;
+ if (entry.value_ptr.* == .label_decl) continue;
+ try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle });
}
- if (scope.loc.start >= source_index) return;
+ for (scope_uses[scope_index].items) |use| {
+ if (std.mem.indexOfScalar(Ast.Node.Index, use_trail.items, use) != null) continue;
+ try use_trail.append(use);
+
+ const use_expr = (try resolveTypeOfNode(
+ store,
+ arena,
+ .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle },
+ )) orelse continue;
+ const use_expr_node = switch (use_expr.type.data) {
+ .other => |n| n,
+ else => continue,
+ };
+ try iterateSymbolsContainerInternal(
+ store,
+ arena,
+ .{ .node = use_expr_node, .handle = use_expr.handle },
+ handle,
+ callback,
+ context,
+ false,
+ use_trail,
+ );
+ }
}
}
@@ -2246,42 +2304,50 @@ pub fn iterateSymbolsGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocat
}
pub fn innermostBlockScopeIndex(handle: DocumentStore.Handle, source_index: usize) usize {
- if (handle.document_scope.scopes.items.len == 1) return 0;
+ if (handle.document_scope.scopes.len == 1) return 0;
- var current: usize = 0;
- for (handle.document_scope.scopes.items[1..]) |*scope, idx| {
- if (source_index >= scope.loc.start and source_index <= scope.loc.end) {
- switch (scope.data) {
- .container, .function, .block => current = idx + 1,
+ const scope_locs = handle.document_scope.scopes.items(.loc);
+ const scope_datas = handle.document_scope.scopes.items(.data);
+
+ var innermost: usize = 0;
+ var scope_index: usize = 1;
+ while (scope_index < handle.document_scope.scopes.len) : (scope_index += 1) {
+ const scope_loc = scope_locs[scope_index];
+ if (source_index >= scope_loc.start and source_index <= scope_loc.end) {
+ switch (scope_datas[scope_index]) {
+ .container, .function, .block => innermost = scope_index,
else => {},
}
}
- if (scope.loc.start > source_index) break;
+ if (scope_loc.start > source_index) break;
}
- return current;
+ return innermost;
}
pub fn innermostBlockScope(handle: DocumentStore.Handle, source_index: usize) Ast.Node.Index {
- return handle.document_scope.scopes.items[innermostBlockScopeIndex(handle, source_index)].toNodeIndex().?;
+ const scope_index = innermostBlockScopeIndex(handle, source_index);
+ return handle.document_scope.scopes.items(.data)[scope_index].toNodeIndex().?;
}
pub fn innermostContainer(handle: *const DocumentStore.Handle, source_index: usize) TypeWithHandle {
- var current = handle.document_scope.scopes.items[0].data.container;
- if (handle.document_scope.scopes.items.len == 1) return TypeWithHandle.typeVal(.{ .node = current, .handle = handle });
+ const scope_datas = handle.document_scope.scopes.items(.data);
- for (handle.document_scope.scopes.items[1..]) |scope| {
- if (source_index >= scope.loc.start and source_index <= scope.loc.end) {
- switch (scope.data) {
- .container => |node| current = node,
- else => {},
- }
+ var current = scope_datas[0].container;
+ if (handle.document_scope.scopes.len == 1) return TypeWithHandle.typeVal(.{ .node = current, .handle = handle });
+
+ var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index);
+ while (scope_iterator.next()) |scope_index| {
+ switch (scope_datas[scope_index]) {
+ .container => |node| current = node,
+ else => {},
}
- if (scope.loc.start > source_index) break;
}
return TypeWithHandle.typeVal(.{ .node = current, .handle = handle });
}
fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []const Ast.Node.Index, symbol: []const u8, handle: *const DocumentStore.Handle) error{OutOfMemory}!?DeclWithHandle {
+ if (uses.len == 0) return null;
+
// If we were asked to resolve this symbol before,
// it is self-referential and we cannot resolve it.
if (std.mem.indexOfScalar([*]const u8, using_trail.items, symbol.ptr) != null)
@@ -2314,20 +2380,20 @@ fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []co
}
pub fn lookupLabel(handle: *const DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle {
- for (handle.document_scope.scopes.items) |scope| {
- if (source_index >= scope.loc.start and source_index < scope.loc.end) {
- if (scope.decls.getEntry(symbol)) |candidate| {
- switch (candidate.value_ptr.*) {
- .label_decl => {},
- else => continue,
- }
- return DeclWithHandle{
- .decl = candidate.value_ptr,
- .handle = handle,
- };
- }
+ const scope_decls = handle.document_scope.scopes.items(.decls);
+
+ var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index);
+ while (scope_iterator.next()) |scope_index| {
+ const candidate = scope_decls[scope_index].getEntry(symbol) orelse continue;
+
+ switch (candidate.value_ptr.*) {
+ .label_decl => {},
+ else => continue,
}
- if (scope.loc.start > source_index) return null;
+ return DeclWithHandle{
+ .decl = candidate.value_ptr,
+ .handle = handle,
+ };
}
return null;
}
@@ -2335,11 +2401,15 @@ pub fn lookupLabel(handle: *const DocumentStore.Handle, symbol: []const u8, sour
pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle {
const innermost_scope_idx = innermostBlockScopeIndex(handle.*, source_index);
+ const scope_locs = handle.document_scope.scopes.items(.loc);
+ const scope_decls = handle.document_scope.scopes.items(.decls);
+ const scope_uses = handle.document_scope.scopes.items(.uses);
+
var curr = innermost_scope_idx;
while (curr >= 0) : (curr -= 1) {
- const scope = &handle.document_scope.scopes.items[curr];
- if (source_index >= scope.loc.start and source_index <= scope.loc.end) blk: {
- if (scope.decls.getEntry(symbol)) |candidate| {
+ const scope_loc = scope_locs[curr];
+ if (source_index >= scope_loc.start and source_index <= scope_loc.end) blk: {
+ if (scope_decls[curr].getEntry(symbol)) |candidate| {
switch (candidate.value_ptr.*) {
.ast_node => |node| {
if (handle.tree.nodes.items(.tag)[node].isContainerField()) break :blk;
@@ -2352,7 +2422,7 @@ pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator
.handle = handle,
};
}
- if (try resolveUse(store, arena, scope.uses.items, symbol, handle)) |result| return result;
+ if (try resolveUse(store, arena, scope_uses[curr].items, symbol, handle)) |result| return result;
}
if (curr == 0) break;
}
@@ -2376,9 +2446,11 @@ pub fn lookupSymbolContainer(
const main_token = tree.nodes.items(.main_token)[container];
const is_enum = token_tags[main_token] == .keyword_enum;
+ const scope_decls = handle.document_scope.scopes.items(.decls);
+ const scope_uses = handle.document_scope.scopes.items(.uses);
- if (findContainerScope(container_handle)) |container_scope| {
- if (container_scope.decls.getEntry(symbol)) |candidate| {
+ if (findContainerScopeIndex(container_handle)) |container_scope_index| {
+ if (scope_decls[container_scope_index].getEntry(symbol)) |candidate| {
switch (candidate.value_ptr.*) {
.ast_node => |node| {
if (node_tags[node].isContainerField()) {
@@ -2392,8 +2464,7 @@ pub fn lookupSymbolContainer(
return DeclWithHandle{ .decl = candidate.value_ptr, .handle = handle };
}
- if (try resolveUse(store, arena, container_scope.uses.items, symbol, handle)) |result| return result;
- return null;
+ if (try resolveUse(store, arena, scope_uses[container_scope_index].items, symbol, handle)) |result| return result;
}
return null;
@@ -2423,15 +2494,19 @@ comptime {
}
pub const DocumentScope = struct {
- scopes: std.ArrayListUnmanaged(Scope),
+ scopes: std.MultiArrayList(Scope),
error_completions: CompletionSet,
enum_completions: CompletionSet,
pub fn deinit(self: *DocumentScope, allocator: std.mem.Allocator) void {
- for (self.scopes.items) |*scope| {
- scope.deinit(allocator);
+ var i: usize = 0;
+ while (i < self.scopes.len) : (i += 1) {
+ self.scopes.items(.decls)[i].deinit(allocator);
+ self.scopes.items(.tests)[i].deinit(allocator);
+ self.scopes.items(.uses)[i].deinit(allocator);
}
self.scopes.deinit(allocator);
+
for (self.error_completions.entries.items(.key)) |item| {
if (item.detail) |detail| allocator.free(detail);
switch (item.documentation orelse continue) {
@@ -2457,6 +2532,13 @@ pub const Scope = struct {
function: Ast.Node.Index, // .tag is FnProto
block: Ast.Node.Index, // .tag is Block
other,
+
+ pub fn toNodeIndex(self: Data) ?Ast.Node.Index {
+ return switch (self) {
+ .container, .function, .block => |idx| idx,
+ else => null,
+ };
+ }
};
loc: offsets.Loc,
@@ -2464,19 +2546,6 @@ pub const Scope = struct {
tests: std.ArrayListUnmanaged(Ast.Node.Index) = .{},
uses: std.ArrayListUnmanaged(Ast.Node.Index) = .{},
data: Data,
-
- pub fn deinit(self: *Scope, allocator: std.mem.Allocator) void {
- self.decls.deinit(allocator);
- self.tests.deinit(allocator);
- self.uses.deinit(allocator);
- }
-
- pub fn toNodeIndex(self: Scope) ?Ast.Node.Index {
- return switch (self.data) {
- .container, .function, .block => |idx| idx,
- else => null,
- };
- }
};
pub fn makeDocumentScope(allocator: std.mem.Allocator, tree: Ast) !DocumentScope {
@@ -2500,7 +2569,7 @@ pub fn makeDocumentScope(allocator: std.mem.Allocator, tree: Ast) !DocumentScope
}
const ScopeContext = struct {
- scopes: *std.ArrayListUnmanaged(Scope),
+ scopes: *std.MultiArrayList(Scope),
enums: *CompletionSet,
errors: *CompletionSet,
tree: Ast,
@@ -2510,46 +2579,18 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx:
const scopes = context.scopes;
const tree = context.tree;
const tags = tree.nodes.items(.tag);
- const token_tags = tree.tokens.items(.tag);
- const data = tree.nodes.items(.data);
- const main_tokens = tree.nodes.items(.main_token);
- const node_tag = tags[node_idx];
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = offsets.nodeToLoc(tree, node_idx),
.data = .{ .container = node_idx },
- };
- const scope_idx = scopes.items.len - 1;
-
- if (node_tag == .error_set_decl) {
- // All identifiers in main_token..data.lhs are error fields.
- var i = main_tokens[node_idx];
- while (i < data[node_idx].rhs) : (i += 1) {
- if (token_tags[i] == .identifier) {
- const name = offsets.tokenToSlice(tree, i);
- if (try scopes.items[scope_idx].decls.fetchPut(allocator, name, .{ .error_token = i })) |_| {
- // TODO Record a redefinition error.
- }
- const gop = try context.errors.getOrPut(allocator, .{
- .label = name,
- .kind = .Constant,
- //.detail =
- .insertText = name,
- .insertTextFormat = .PlainText,
- });
- if (!gop.found_existing) {
- gop.key_ptr.detail = try std.fmt.allocPrint(allocator, "error.{s}", .{name});
- }
- }
- }
- }
+ });
+ const scope_index = scopes.len - 1;
var buf: [2]Ast.Node.Index = undefined;
- const ast_decls = ast.declMembers(tree, node_idx, &buf);
- for (ast_decls) |decl| {
+ const container_decl = tree.fullContainerDecl(&buf, node_idx).?;
+ for (container_decl.ast.members) |decl| {
if (tags[decl] == .@"usingnamespace") {
- try scopes.items[scope_idx].uses.append(allocator, decl);
+ try scopes.items(.uses)[scope_index].append(allocator, decl);
continue;
}
@@ -2557,17 +2598,14 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx:
const name = getDeclName(tree, decl) orelse continue;
if (tags[decl] == .test_decl) {
- try scopes.items[scope_idx].tests.append(allocator, decl);
+ try scopes.items(.tests)[scope_index].append(allocator, decl);
continue;
}
- if (try scopes.items[scope_idx].decls.fetchPut(allocator, name, .{ .ast_node = decl })) |existing| {
+ if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .ast_node = decl })) |existing| {
_ = existing;
// TODO Record a redefinition error.
}
- var buffer: [2]Ast.Node.Index = undefined;
- const container_decl = ast.containerDecl(tree, node_idx, &buffer) orelse continue;
-
if (container_decl.ast.enum_token != null) {
if (std.mem.eql(u8, name, "_")) return;
const Documentation = @TypeOf(@as(types.CompletionItem, undefined).documentation);
@@ -2619,10 +2657,37 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.root,
- .error_set_decl,
=> {
try makeInnerScope(allocator, context, node_idx);
},
+ .error_set_decl => {
+ try scopes.append(allocator, .{
+ .loc = offsets.nodeToLoc(tree, node_idx),
+ .data = .{ .container = node_idx },
+ });
+ const scope_index = scopes.len - 1;
+
+ // All identifiers in main_token..data.lhs are error fields.
+ var i = main_tokens[node_idx];
+ while (i < data[node_idx].rhs) : (i += 1) {
+ if (token_tags[i] == .identifier) {
+ const name = offsets.tokenToSlice(tree, i);
+ if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .error_token = i })) |_| {
+ // TODO Record a redefinition error.
+ }
+ const gop = try context.errors.getOrPut(allocator, .{
+ .label = name,
+ .kind = .Constant,
+ //.detail =
+ .insertText = name,
+ .insertTextFormat = .PlainText,
+ });
+ if (!gop.found_existing) {
+ gop.key_ptr.detail = try std.fmt.allocPrint(allocator, "error.{s}", .{name});
+ }
+ }
+ }
+ },
.array_type_sentinel => {
// TODO: ???
return;
@@ -2634,19 +2699,19 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.fn_decl,
=> |fn_tag| {
var buf: [1]Ast.Node.Index = undefined;
- const func = ast.fnProto(tree, node_idx, &buf).?;
+ const func = tree.fullFnProto(&buf, node_idx).?;
try scopes.append(allocator, .{
.loc = offsets.nodeToLoc(tree, node_idx),
.data = .{ .function = node_idx },
});
- const scope_idx = scopes.items.len - 1;
+ const scope_index = scopes.len - 1;
var it = func.iterate(&tree);
while (ast.nextFnParam(&it)) |param| {
// Add parameter decls
if (param.name_token) |name_token| {
- if (try scopes.items[scope_idx].decls.fetchPut(
+ if (try scopes.items(.decls)[scope_index].fetchPut(
allocator,
tree.tokenSlice(name_token),
.{ .param_payload = .{ .param = param, .func = node_idx } },
@@ -2684,15 +2749,15 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
// if labeled block
if (token_tags[first_token] == .identifier) {
- const scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, main_tokens[node_idx]),
.end = offsets.tokenToLoc(tree, last_token).start,
},
.data = .other,
- };
- try scope.decls.putNoClobber(allocator, tree.tokenSlice(first_token), .{ .label_decl = .{
+ });
+ const scope_index = scopes.len - 1;
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, tree.tokenSlice(first_token), .{ .label_decl = .{
.label = first_token,
.block = node_idx,
} });
@@ -2702,21 +2767,21 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.loc = offsets.nodeToLoc(tree, node_idx),
.data = .{ .container = node_idx },
});
- const scope_idx = scopes.items.len - 1;
+ const scope_index = scopes.len - 1;
var buffer: [2]Ast.Node.Index = undefined;
const statements = ast.blockStatements(tree, node_idx, &buffer).?;
for (statements) |idx| {
if (tags[idx] == .@"usingnamespace") {
- try scopes.items[scope_idx].uses.append(allocator, idx);
+ try scopes.items(.uses)[scope_index].append(allocator, idx);
continue;
}
try makeScopeInternal(allocator, context, idx);
- if (ast.varDecl(tree, idx)) |var_decl| {
+ if (tree.fullVarDecl(idx)) |var_decl| {
const name = tree.tokenSlice(var_decl.ast.mut_token + 1);
- if (try scopes.items[scope_idx].decls.fetchPut(allocator, name, .{ .ast_node = idx })) |existing| {
+ if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .ast_node = idx })) |existing| {
_ = existing;
// TODO record a redefinition error.
}
@@ -2728,23 +2793,23 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.@"if",
.if_simple,
=> {
- const if_node = ast.ifFull(tree, node_idx);
+ const if_node = ast.fullIf(tree, node_idx).?;
if (if_node.payload_token) |payload| {
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, payload),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, if_node.ast.then_expr)).end,
},
.data = .other,
- };
+ });
+ const scope_index = scopes.len - 1;
const name_token = payload + @boolToInt(token_tags[payload] == .asterisk);
std.debug.assert(token_tags[name_token] == .identifier);
const name = tree.tokenSlice(name_token);
- try scope.decls.putNoClobber(allocator, name, .{
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{
.pointer_payload = .{
.name = name_token,
.condition = if_node.ast.cond_expr,
@@ -2757,17 +2822,17 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
if (if_node.ast.else_expr != 0) {
if (if_node.error_token) |err_token| {
std.debug.assert(token_tags[err_token] == .identifier);
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, err_token),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, if_node.ast.else_expr)).end,
},
.data = .other,
- };
+ });
+ const scope_index = scopes.len - 1;
const name = tree.tokenSlice(err_token);
- try scope.decls.putNoClobber(allocator, name, .{ .ast_node = if_node.ast.else_expr });
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .ast_node = if_node.ast.else_expr });
}
try makeScopeInternal(allocator, context, if_node.ast.else_expr);
}
@@ -2778,21 +2843,21 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
const catch_token = main_tokens[node_idx];
const catch_expr = data[node_idx].rhs;
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, tree.firstToken(catch_expr)),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, catch_expr)).end,
},
.data = .other,
- };
+ });
+ const scope_index = scopes.len - 1;
if (token_tags.len > catch_token + 2 and
token_tags[catch_token + 1] == .pipe and
token_tags[catch_token + 2] == .identifier)
{
const name = tree.tokenSlice(catch_token + 2);
- try scope.decls.putNoClobber(allocator, name, .{ .ast_node = catch_expr });
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .ast_node = catch_expr });
}
try makeScopeInternal(allocator, context, catch_expr);
},
@@ -2802,41 +2867,41 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.@"for",
.for_simple,
=> {
- const while_node = ast.whileAst(tree, node_idx).?;
+ const while_node = ast.fullWhile(tree, node_idx).?;
const is_for = node_tag == .@"for" or node_tag == .for_simple;
if (while_node.label_token) |label| {
std.debug.assert(token_tags[label] == .identifier);
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, while_node.ast.while_token),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, node_idx)).end,
},
.data = .other,
- };
+ });
+ const scope_index = scopes.len - 1;
- try scope.decls.putNoClobber(allocator, tree.tokenSlice(label), .{ .label_decl = .{
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, tree.tokenSlice(label), .{ .label_decl = .{
.label = label,
.block = while_node.ast.then_expr,
} });
}
if (while_node.payload_token) |payload| {
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, payload),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, while_node.ast.then_expr)).end,
},
.data = .other,
- };
+ });
+ const scope_index = scopes.len - 1;
const name_token = payload + @boolToInt(token_tags[payload] == .asterisk);
std.debug.assert(token_tags[name_token] == .identifier);
const name = tree.tokenSlice(name_token);
- try scope.decls.putNoClobber(allocator, name, if (is_for) .{
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, if (is_for) .{
.array_payload = .{
.identifier = name_token,
.array_expr = while_node.ast.cond_expr,
@@ -2852,7 +2917,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
if (token_tags[name_token + 1] == .comma) {
const index_token = name_token + 2;
std.debug.assert(token_tags[index_token] == .identifier);
- if (try scope.decls.fetchPut(
+ if (try scopes.items(.decls)[scope_index].fetchPut(
allocator,
tree.tokenSlice(index_token),
.{ .array_index = index_token },
@@ -2867,17 +2932,17 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
if (while_node.ast.else_expr != 0) {
if (while_node.error_token) |err_token| {
std.debug.assert(token_tags[err_token] == .identifier);
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, err_token),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, while_node.ast.else_expr)).end,
},
.data = .other,
- };
+ });
+ const scope_index = scopes.len - 1;
const name = tree.tokenSlice(err_token);
- try scope.decls.putNoClobber(allocator, name, .{ .ast_node = while_node.ast.else_expr });
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .ast_node = while_node.ast.else_expr });
}
try makeScopeInternal(allocator, context, while_node.ast.else_expr);
}
@@ -2890,27 +2955,23 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
const cases = tree.extra_data[extra.start..extra.end];
for (cases) |case| {
- const switch_case: Ast.full.SwitchCase = switch (tags[case]) {
- .switch_case => tree.switchCase(case),
- .switch_case_one => tree.switchCaseOne(case),
- else => continue,
- };
+ const switch_case: Ast.full.SwitchCase = tree.fullSwitchCase(case).?;
if (switch_case.payload_token) |payload| {
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, payload),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, switch_case.ast.target_expr)).end,
},
.data = .other,
- };
+ });
+ const scope_index = scopes.len - 1;
// if payload is *name then get next token
const name_token = payload + @boolToInt(token_tags[payload] == .asterisk);
const name = tree.tokenSlice(name_token);
- try scope.decls.putNoClobber(allocator, name, .{
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{
.switch_payload = .{
.node = name_token,
.switch_expr = cond,
@@ -2935,7 +2996,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.aligned_var_decl,
.simple_var_decl,
=> {
- const var_decl = ast.varDecl(tree, node_idx).?;
+ const var_decl = tree.fullVarDecl(node_idx).?;
if (var_decl.ast.type_node != 0) {
try makeScopeInternal(allocator, context, var_decl.ast.type_node);
}
@@ -2954,7 +3015,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.async_call_one_comma,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const call = ast.callFull(tree, node_idx, &buf).?;
+ const call = tree.fullCall(&buf, node_idx).?;
try makeScopeInternal(allocator, context, call.ast.fn_expr);
for (call.ast.params) |param|
@@ -2970,13 +3031,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.struct_init_one_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
- const struct_init: Ast.full.StructInit = switch (node_tag) {
- .struct_init, .struct_init_comma => tree.structInit(node_idx),
- .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node_idx),
- .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node_idx),
- .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node_idx),
- else => unreachable,
- };
+ const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node_idx).?;
if (struct_init.ast.type_expr != 0)
try makeScopeInternal(allocator, context, struct_init.ast.type_expr);
@@ -2995,13 +3050,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.array_init_one_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
- const array_init: Ast.full.ArrayInit = switch (node_tag) {
- .array_init, .array_init_comma => tree.arrayInit(node_idx),
- .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node_idx),
- .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node_idx),
- .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node_idx),
- else => unreachable,
- };
+ const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buf, node_idx).?;
if (array_init.ast.type_expr != 0)
try makeScopeInternal(allocator, context, array_init.ast.type_expr);
@@ -3013,7 +3062,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.container_field_align,
.container_field_init,
=> {
- const field = ast.containerField(tree, node_idx).?;
+ const field = tree.fullContainerField(node_idx).?;
try makeScopeInternal(allocator, context, field.ast.type_expr);
try makeScopeInternal(allocator, context, field.ast.align_expr);
@@ -3036,7 +3085,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
- const ptr_type: Ast.full.PtrType = ast.ptrType(tree, node_idx).?;
+ const ptr_type: Ast.full.PtrType = ast.fullPtrType(tree, node_idx).?;
try makeScopeInternal(allocator, context, ptr_type.ast.sentinel);
try makeScopeInternal(allocator, context, ptr_type.ast.align_node);
@@ -3046,12 +3095,8 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.slice_open,
.slice_sentinel,
=> {
- const slice: Ast.full.Slice = switch (node_tag) {
- .slice => tree.slice(node_idx),
- .slice_open => tree.sliceOpen(node_idx),
- .slice_sentinel => tree.sliceSentinel(node_idx),
- else => unreachable,
- };
+ const slice: Ast.full.Slice = tree.fullSlice(node_idx).?;
+
try makeScopeInternal(allocator, context, slice.ast.sliced);
try makeScopeInternal(allocator, context, slice.ast.start);
try makeScopeInternal(allocator, context, slice.ast.end);
@@ -3061,18 +3106,17 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
const expr = data[node_idx].rhs;
if (data[node_idx].lhs != 0) {
const payload_token = data[node_idx].lhs;
- var scope = try scopes.addOne(allocator);
- scope.* = .{
+ try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, payload_token),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, expr)).end,
},
.data = .other,
- };
- errdefer scope.decls.deinit(allocator);
+ });
+ const scope_index = scopes.len - 1;
const name = tree.tokenSlice(payload_token);
- try scope.decls.putNoClobber(allocator, name, .{ .ast_node = expr });
+ try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{ .ast_node = expr });
}
try makeScopeInternal(allocator, context, expr);
diff --git a/src/ast.zig b/src/ast.zig
index 5c7f5ce..cfbf935 100644
--- a/src/ast.zig
+++ b/src/ast.zig
@@ -7,7 +7,7 @@ const Ast = std.zig.Ast;
const Node = Ast.Node;
const full = Ast.full;
-fn fullPtrType(tree: Ast, info: full.PtrType.Components) full.PtrType {
+fn fullPtrTypeComponents(tree: Ast, info: full.PtrType.Components) full.PtrType {
const token_tags = tree.tokens.items(.tag);
const size: std.builtin.Type.Pointer.Size = switch (token_tags[info.main_token]) {
.asterisk,
@@ -57,7 +57,7 @@ pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType {
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type);
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.lhs, Node.PtrType);
- return fullPtrType(tree, .{
+ return fullPtrTypeComponents(tree, .{
.main_token = tree.nodes.items(.main_token)[node],
.align_node = extra.align_node,
.addrspace_node = extra.addrspace_node,
@@ -71,7 +71,7 @@ pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType {
pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType {
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
const data = tree.nodes.items(.data)[node];
- return fullPtrType(tree, .{
+ return fullPtrTypeComponents(tree, .{
.main_token = tree.nodes.items(.main_token)[node],
.align_node = 0,
.addrspace_node = 0,
@@ -85,7 +85,7 @@ pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType {
pub fn ptrTypeAligned(tree: Ast, node: Node.Index) full.PtrType {
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
const data = tree.nodes.items(.data)[node];
- return fullPtrType(tree, .{
+ return fullPtrTypeComponents(tree, .{
.main_token = tree.nodes.items(.main_token)[node],
.align_node = data.lhs,
.addrspace_node = 0,
@@ -100,7 +100,7 @@ pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType {
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange);
- return fullPtrType(tree, .{
+ return fullPtrTypeComponents(tree, .{
.main_token = tree.nodes.items(.main_token)[node],
.align_node = extra.align_node,
.addrspace_node = extra.addrspace_node,
@@ -111,7 +111,7 @@ pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType {
});
}
-fn fullIf(tree: Ast, info: full.If.Components) full.If {
+fn fullIfComponents(tree: Ast, info: full.If.Components) full.If {
const token_tags = tree.tokens.items(.tag);
var result: full.If = .{
.ast = info,
@@ -137,27 +137,29 @@ fn fullIf(tree: Ast, info: full.If.Components) full.If {
}
pub fn ifFull(tree: Ast, node: Node.Index) full.If {
+ std.debug.assert(tree.nodes.items(.tag)[node] == .@"if");
const data = tree.nodes.items(.data)[node];
- if (tree.nodes.items(.tag)[node] == .@"if") {
- const extra = tree.extraData(data.rhs, Node.If);
- return fullIf(tree, .{
- .cond_expr = data.lhs,
- .then_expr = extra.then_expr,
- .else_expr = extra.else_expr,
- .if_token = tree.nodes.items(.main_token)[node],
- });
- } else {
- std.debug.assert(tree.nodes.items(.tag)[node] == .if_simple);
- return fullIf(tree, .{
- .cond_expr = data.lhs,
- .then_expr = data.rhs,
- .else_expr = 0,
- .if_token = tree.nodes.items(.main_token)[node],
- });
- }
+ const extra = tree.extraData(data.rhs, Node.If);
+ return fullIfComponents(tree, .{
+ .cond_expr = data.lhs,
+ .then_expr = extra.then_expr,
+ .else_expr = extra.else_expr,
+ .if_token = tree.nodes.items(.main_token)[node],
+ });
}
-fn fullWhile(tree: Ast, info: full.While.Components) full.While {
+pub fn ifSimple(tree: Ast, node: Node.Index) full.If {
+ std.debug.assert(tree.nodes.items(.tag)[node] == .if_simple);
+ const data = tree.nodes.items(.data)[node];
+ return fullIfComponents(tree, .{
+ .cond_expr = data.lhs,
+ .then_expr = data.rhs,
+ .else_expr = 0,
+ .if_token = tree.nodes.items(.main_token)[node],
+ });
+}
+
+fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While {
const token_tags = tree.tokens.items(.tag);
var result: full.While = .{
.ast = info,
@@ -194,7 +196,7 @@ fn fullWhile(tree: Ast, info: full.While.Components) full.While {
pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
@@ -206,7 +208,7 @@ pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
pub fn whileCont(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.WhileCont);
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = extra.cont_expr,
@@ -218,7 +220,7 @@ pub fn whileCont(tree: Ast, node: Node.Index) full.While {
pub fn whileFull(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.While);
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = extra.cont_expr,
@@ -229,7 +231,7 @@ pub fn whileFull(tree: Ast, node: Node.Index) full.While {
pub fn forSimple(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
@@ -241,7 +243,7 @@ pub fn forSimple(tree: Ast, node: Node.Index) full.While {
pub fn forFull(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.If);
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
@@ -250,6 +252,35 @@ pub fn forFull(tree: Ast, node: Node.Index) full.While {
});
}
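+/// Returns the `full.PtrType` for any of the `ptr_type*` node tags, or null otherwise.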
+pub fn fullPtrType(tree: Ast, node: Node.Index) ?full.PtrType {
+ return switch (tree.nodes.items(.tag)[node]) {
+ .ptr_type_aligned => tree.ptrTypeAligned(node),
+ .ptr_type_sentinel => tree.ptrTypeSentinel(node),
+ .ptr_type => tree.ptrType(node),
+ .ptr_type_bit_range => tree.ptrTypeBitRange(node),
+ else => null,
+ };
+}
+
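+/// Returns the `full.If` for `.@"if"` and `.if_simple` nodes, or null otherwise.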
+pub fn fullIf(tree: Ast, node: Node.Index) ?full.If {
+ return switch (tree.nodes.items(.tag)[node]) {
+ .if_simple => tree.ifSimple(node),
+ .@"if" => tree.ifFull(node),
+ else => null,
+ };
+}
+
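+/// Returns the `full.While` for while and for loop nodes, or null otherwise.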
+pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While {
+ return switch (tree.nodes.items(.tag)[node]) {
+ .while_simple => tree.whileSimple(node),
+ .while_cont => tree.whileCont(node),
+ .@"while" => tree.whileFull(node),
+ .for_simple => tree.forSimple(node),
+ .@"for" => tree.forFull(node),
+ else => null,
+ };
+}
+
pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
const TokenIndex = Ast.TokenIndex;
const tags = tree.nodes.items(.tag);
@@ -911,36 +942,6 @@ pub fn paramLastToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex {
return param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
}
-pub fn containerField(tree: Ast, node: Ast.Node.Index) ?Ast.full.ContainerField {
- return switch (tree.nodes.items(.tag)[node]) {
- .container_field => tree.containerField(node),
- .container_field_init => tree.containerFieldInit(node),
- .container_field_align => tree.containerFieldAlign(node),
- else => null,
- };
-}
-
-pub fn ptrType(tree: Ast, node: Ast.Node.Index) ?Ast.full.PtrType {
- return switch (tree.nodes.items(.tag)[node]) {
- .ptr_type => ptrTypeSimple(tree, node),
- .ptr_type_aligned => ptrTypeAligned(tree, node),
- .ptr_type_bit_range => ptrTypeBitRange(tree, node),
- .ptr_type_sentinel => ptrTypeSentinel(tree, node),
- else => null,
- };
-}
-
-pub fn whileAst(tree: Ast, node: Ast.Node.Index) ?Ast.full.While {
- return switch (tree.nodes.items(.tag)[node]) {
- .@"while" => whileFull(tree, node),
- .while_simple => whileSimple(tree, node),
- .while_cont => whileCont(tree, node),
- .@"for" => forFull(tree, node),
- .for_simple => forSimple(tree, node),
- else => null,
- };
-}
-
pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
return switch (tree.nodes.items(.tag)[node]) {
.container_decl,
@@ -962,58 +963,6 @@ pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
};
}
-pub fn containerDecl(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) ?full.ContainerDecl {
- return switch (tree.nodes.items(.tag)[node_idx]) {
- .container_decl, .container_decl_trailing => tree.containerDecl(node_idx),
- .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx),
- .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx),
- .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx),
- .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx),
- .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx),
- else => null,
- };
-}
-
-/// Returns the member indices of a given declaration container.
-/// Asserts given `tag` is a container node
-pub fn declMembers(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) []const Ast.Node.Index {
- std.debug.assert(isContainer(tree, node_idx));
- return switch (tree.nodes.items(.tag)[node_idx]) {
- .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members,
- .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx).ast.members,
- .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx).ast.members,
- .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx).ast.members,
- .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members,
- .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx).ast.members,
- .root => tree.rootDecls(),
- .error_set_decl => &[_]Ast.Node.Index{},
- else => unreachable,
- };
-}
-
-/// Returns an `ast.full.VarDecl` for a given node index.
-/// Returns null if the tag doesn't match
-pub fn varDecl(tree: Ast, node_idx: Ast.Node.Index) ?Ast.full.VarDecl {
- return switch (tree.nodes.items(.tag)[node_idx]) {
- .global_var_decl => tree.globalVarDecl(node_idx),
- .local_var_decl => tree.localVarDecl(node_idx),
- .aligned_var_decl => tree.alignedVarDecl(node_idx),
- .simple_var_decl => tree.simpleVarDecl(node_idx),
- else => null,
- };
-}
-
-pub fn isPtrType(tree: Ast, node: Ast.Node.Index) bool {
- return switch (tree.nodes.items(.tag)[node]) {
- .ptr_type,
- .ptr_type_aligned,
- .ptr_type_bit_range,
- .ptr_type_sentinel,
- => true,
- else => false,
- };
-}
-
pub fn isBuiltinCall(tree: Ast, node: Ast.Node.Index) bool {
return switch (tree.nodes.items(.tag)[node]) {
.builtin_call,
@@ -1051,45 +1000,6 @@ pub fn isBlock(tree: Ast, node: Ast.Node.Index) bool {
};
}
-pub fn fnProtoHasBody(tree: Ast, node: Ast.Node.Index) ?bool {
- return switch (tree.nodes.items(.tag)[node]) {
- .fn_proto,
- .fn_proto_multi,
- .fn_proto_one,
- .fn_proto_simple,
- => false,
- .fn_decl => true,
- else => null,
- };
-}
-
-pub fn fnProto(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto {
- return switch (tree.nodes.items(.tag)[node]) {
- .fn_proto => tree.fnProto(node),
- .fn_proto_multi => tree.fnProtoMulti(node),
- .fn_proto_one => tree.fnProtoOne(buf, node),
- .fn_proto_simple => tree.fnProtoSimple(buf, node),
- .fn_decl => fnProto(tree, tree.nodes.items(.data)[node].lhs, buf),
- else => null,
- };
-}
-
-pub fn callFull(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.Call {
- return switch (tree.nodes.items(.tag)[node]) {
- .call,
- .call_comma,
- .async_call,
- .async_call_comma,
- => tree.callFull(node),
- .call_one,
- .call_one_comma,
- .async_call_one,
- .async_call_one_comma,
- => tree.callOne(buf, node),
- else => null,
- };
-}
-
/// returns a list of parameters
pub fn builtinCallParams(tree: Ast, node: Ast.Node.Index, buf: *[2]Ast.Node.Index) ?[]const Node.Index {
const node_data = tree.nodes.items(.data);
diff --git a/src/debug.zig b/src/debug.zig
index 02a1b38..d5fecd6 100644
--- a/src/debug.zig
+++ b/src/debug.zig
@@ -43,8 +43,10 @@ pub fn printTree(tree: std.zig.Ast) void {
pub fn printDocumentScope(doc_scope: analysis.DocumentScope) void {
if (!std.debug.runtime_safety) @compileError("this function should only be used in debug mode!");
- for (doc_scope.scopes.items) |scope, i| {
- if (i != 0) std.debug.print("\n\n", .{});
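+ // std.MultiArrayList has no element slice to loop over; index it and copy each Scope out with get().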
+ var index: usize = 0;
+ while (index < doc_scope.scopes.len) : (index += 1) {
+ const scope = doc_scope.scopes.get(index);
+ if (index != 0) std.debug.print("\n\n", .{});
std.debug.print(
\\[{d}, {d}] {}
\\usingnamespaces: {d}
diff --git a/src/inlay_hints.zig b/src/inlay_hints.zig
index b1351ee..ef3fc23 100644
--- a/src/inlay_hints.zig
+++ b/src/inlay_hints.zig
@@ -84,7 +84,7 @@ fn writeCallHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *Doc
switch (decl.*) {
.ast_node => |fn_node| {
var buffer: [1]Ast.Node.Index = undefined;
- if (ast.fnProto(decl_tree, fn_node, &buffer)) |fn_proto| {
+ if (decl_tree.fullFnProto(&buffer, fn_node)) |fn_proto| {
var i: usize = 0;
var it = fn_proto.iterate(&decl_tree);
@@ -282,7 +282,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.async_call_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
- const call = ast.callFull(tree, node, &params).?;
+ const call = tree.fullCall(&params, node).?;
try writeCallNodeHint(builder, arena, store, call);
for (call.ast.params) |param| {
@@ -351,7 +351,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.ptr_type,
.ptr_type_bit_range,
=> {
- const ptr_type: Ast.full.PtrType = ast.ptrType(tree, node).?;
+ const ptr_type: Ast.full.PtrType = ast.fullPtrType(tree, node).?;
if (ptr_type.ast.sentinel != 0) {
return try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.sentinel, range });
@@ -458,12 +458,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.slice,
.slice_sentinel,
=> {
- const slice: Ast.full.Slice = switch (tag) {
- .slice => tree.slice(node),
- .slice_open => tree.sliceOpen(node),
- .slice_sentinel => tree.sliceSentinel(node),
- else => unreachable,
- };
+ const slice: Ast.full.Slice = tree.fullSlice(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.sliced, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.start, range });
@@ -481,13 +476,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.array_init_comma,
=> {
var buffer: [2]Ast.Node.Index = undefined;
- const array_init: Ast.full.ArrayInit = switch (tag) {
- .array_init, .array_init_comma => tree.arrayInit(node),
- .array_init_one, .array_init_one_comma => tree.arrayInitOne(buffer[0..1], node),
- .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
- .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buffer, node),
- else => unreachable,
- };
+ const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, array_init.ast.type_expr, range });
for (array_init.ast.elements) |elem| {
@@ -505,13 +494,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.struct_init_comma,
=> {
var buffer: [2]Ast.Node.Index = undefined;
- const struct_init: Ast.full.StructInit = switch (tag) {
- .struct_init, .struct_init_comma => tree.structInit(node),
- .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
- .struct_init_one, .struct_init_one_comma => tree.structInitOne(buffer[0..1], node),
- .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buffer, node),
- else => unreachable,
- };
+ const struct_init: Ast.full.StructInit = tree.fullStructInit(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, struct_init.ast.type_expr, range });
@@ -546,7 +529,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.switch_case_inline_one,
.switch_case_inline,
=> {
- const switch_case = if (tag == .switch_case or tag == .switch_case_inline) tree.switchCase(node) else tree.switchCaseOne(node);
+ const switch_case = tree.fullSwitchCase(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, switch_case.ast.target_expr, range });
},
@@ -557,7 +540,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.for_simple,
.@"for",
=> {
- const while_node = ast.whileAst(tree, node).?;
+ const while_node = ast.fullWhile(tree, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cond_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cont_expr, range });
@@ -571,7 +554,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.if_simple,
.@"if",
=> {
- const if_node = ast.ifFull(tree, node);
+ const if_node = ast.fullIf(tree, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.cond_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.then_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.else_expr, range });
@@ -584,7 +567,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.fn_decl,
=> {
var buffer: [1]Ast.Node.Index = undefined;
- const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buffer).?;
+ const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buffer, node).?;
var it = fn_proto.iterate(&tree);
while (ast.nextFnParam(&it)) |param_decl| {
@@ -617,7 +600,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.tagged_union_enum_tag_trailing,
=> {
var buffer: [2]Ast.Node.Index = undefined;
- const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buffer).?;
+ const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, decl.ast.arg, range });
@@ -634,7 +617,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.container_field_align,
.container_field,
=> {
- const container_field = ast.containerField(tree, node).?;
+ const container_field = tree.fullContainerField(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.value_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.align_expr, range });
@@ -666,11 +649,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.asm_output,
.asm_input,
=> {
- const asm_node: Ast.full.Asm = switch (tag) {
- .@"asm" => tree.asmFull(node),
- .asm_simple => tree.asmSimple(node),
- else => return,
- };
+ const asm_node: Ast.full.Asm = tree.fullAsm(node) orelse return;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, asm_node.ast.template, range });
},
@@ -700,8 +679,7 @@ pub fn writeRangeInlayHint(
.encoding = encoding,
};
- var buf: [2]Ast.Node.Index = undefined;
- for (ast.declMembers(handle.tree, 0, &buf)) |child| {
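+ // Node 0 is the root, so rootDecls() yields its members directly without a scratch buffer.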
+ for (handle.tree.rootDecls()) |child| {
if (!isNodeInRange(handle.tree, child, range)) continue;
try writeNodeInlayHint(&builder, arena, store, child, range);
}
diff --git a/src/references.zig b/src/references.zig
index e508ea9..07f9cca 100644
--- a/src/references.zig
+++ b/src/references.zig
@@ -118,18 +118,19 @@ fn symbolReferencesInternal(
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.root,
- .error_set_decl,
=> {
var buf: [2]Ast.Node.Index = undefined;
- for (ast.declMembers(tree, node, &buf)) |member|
+ const container_decl = tree.fullContainerDecl(&buf, node).?;
+ for (container_decl.ast.members) |member|
try symbolReferencesInternal(builder, member, handle, false);
},
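+ // Error set members are identifier tokens, not nodes, so there is nothing to recurse into.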
+ .error_set_decl => {},
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
- const var_decl = ast.varDecl(tree, node).?;
+ const var_decl = tree.fullVarDecl(node).?;
try symbolReferencesInternal(builder, var_decl.ast.type_node, handle, false);
try symbolReferencesInternal(builder, var_decl.ast.init_node, handle, false);
},
@@ -137,7 +138,7 @@ fn symbolReferencesInternal(
.container_field_align,
.container_field_init,
=> {
- const field = ast.containerField(tree, node).?;
+ const field = tree.fullContainerField(node).?;
try symbolReferencesInternal(builder, field.ast.type_expr, handle, false);
try symbolReferencesInternal(builder, field.ast.value_expr, handle, false);
},
@@ -152,7 +153,7 @@ fn symbolReferencesInternal(
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const fn_proto = ast.fnProto(tree, node, &buf).?;
+ const fn_proto = tree.fullFnProto(&buf, node).?;
var it = fn_proto.iterate(&tree);
while (ast.nextFnParam(&it)) |param| {
try symbolReferencesInternal(builder, param.type_expr, handle, false);
@@ -179,16 +180,10 @@ fn symbolReferencesInternal(
},
.switch_case_one,
.switch_case_inline_one,
- => {
- const case_one = tree.switchCaseOne(node);
- try symbolReferencesInternal(builder, case_one.ast.target_expr, handle, false);
- for (case_one.ast.values) |val|
- try symbolReferencesInternal(builder, val, handle, false);
- },
.switch_case,
.switch_case_inline,
=> {
- const case = tree.switchCase(node);
+ const case = tree.fullSwitchCase(node).?;
try symbolReferencesInternal(builder, case.ast.target_expr, handle, false);
for (case.ast.values) |val|
try symbolReferencesInternal(builder, val, handle, false);
@@ -199,7 +194,7 @@ fn symbolReferencesInternal(
.for_simple,
.@"for",
=> {
- const loop = ast.whileAst(tree, node).?;
+ const loop = ast.fullWhile(tree, node).?;
try symbolReferencesInternal(builder, loop.ast.cond_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.then_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.cont_expr, handle, false);
@@ -208,7 +203,7 @@ fn symbolReferencesInternal(
.@"if",
.if_simple,
=> {
- const if_node = ast.ifFull(tree, node);
+ const if_node = ast.fullIf(tree, node).?;
try symbolReferencesInternal(builder, if_node.ast.cond_expr, handle, false);
try symbolReferencesInternal(builder, if_node.ast.then_expr, handle, false);
try symbolReferencesInternal(builder, if_node.ast.else_expr, handle, false);
@@ -218,7 +213,7 @@ fn symbolReferencesInternal(
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
- const ptr_type = ast.ptrType(tree, node).?;
+ const ptr_type = ast.fullPtrType(tree, node).?;
if (ptr_type.ast.align_node != 0) {
try symbolReferencesInternal(builder, ptr_type.ast.align_node, handle, false);
@@ -239,15 +234,10 @@ fn symbolReferencesInternal(
.array_init_one_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
- => |tag| {
+ => {
var buf: [2]Ast.Node.Index = undefined;
- const array_init = switch (tag) {
- .array_init, .array_init_comma => tree.arrayInit(node),
- .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
- .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
- .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node),
- else => unreachable,
- };
+ const array_init = tree.fullArrayInit(&buf, node).?;
+
try symbolReferencesInternal(builder, array_init.ast.type_expr, handle, false);
for (array_init.ast.elements) |e|
try symbolReferencesInternal(builder, e, handle, false);
@@ -260,15 +250,10 @@ fn symbolReferencesInternal(
.struct_init_dot_two_comma,
.struct_init_one,
.struct_init_one_comma,
- => |tag| {
+ => {
var buf: [2]Ast.Node.Index = undefined;
- const struct_init: Ast.full.StructInit = switch (tag) {
- .struct_init, .struct_init_comma => tree.structInit(node),
- .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
- .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
- .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node),
- else => unreachable,
- };
+ const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?;
+
try symbolReferencesInternal(builder, struct_init.ast.type_expr, handle, false);
for (struct_init.ast.fields) |field|
try symbolReferencesInternal(builder, field, handle, false);
@@ -283,7 +268,7 @@ fn symbolReferencesInternal(
.async_call_one_comma,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const call = ast.callFull(tree, node, &buf).?;
+ const call = tree.fullCall(&buf, node).?;
try symbolReferencesInternal(builder, call.ast.fn_expr, handle, false);
@@ -294,13 +279,8 @@ fn symbolReferencesInternal(
.slice,
.slice_sentinel,
.slice_open,
- => |tag| {
- const slice: Ast.full.Slice = switch (tag) {
- .slice => tree.slice(node),
- .slice_open => tree.sliceOpen(node),
- .slice_sentinel => tree.sliceSentinel(node),
- else => unreachable,
- };
+ => {
+ const slice: Ast.full.Slice = tree.fullSlice(node).?;
try symbolReferencesInternal(builder, slice.ast.sliced, handle, false);
try symbolReferencesInternal(builder, slice.ast.start, handle, false);
@@ -514,13 +494,13 @@ pub fn symbolReferences(
.param_payload => |pay| blk: {
// Rename the param tok.
const param = pay.param;
- for (curr_handle.document_scope.scopes.items) |scope| {
- if (scope.data != .function) continue;
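+ // Only the scope's data field is needed here, so iterate just that column of the MultiArrayList.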
+ for (curr_handle.document_scope.scopes.items(.data)) |scope_data| {
+ if (scope_data != .function) continue;
- const proto = scope.data.function;
+ const proto = scope_data.function;
var buf: [1]Ast.Node.Index = undefined;
- const fn_proto = ast.fnProto(curr_handle.tree, proto, &buf).?;
+ const fn_proto = curr_handle.tree.fullFnProto(&buf, proto).?;
var it = fn_proto.iterate(&curr_handle.tree);
while (ast.nextFnParam(&it)) |candidate| {
diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig
index 788cd50..0484971 100644
--- a/src/semantic_tokens.zig
+++ b/src/semantic_tokens.zig
@@ -339,7 +339,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.simple_var_decl,
.aligned_var_decl,
=> {
- const var_decl = ast.varDecl(tree, node).?;
+ const var_decl = tree.fullVarDecl(node).?;
if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
try writeDocComments(builder, tree, comment_idx);
@@ -386,7 +386,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.tagged_union_two_trailing,
=> {
var buf: [2]Ast.Node.Index = undefined;
- const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buf).?;
+ const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buf, node).?;
try writeToken(builder, decl.layout_token, .keyword);
try writeToken(builder, decl.ast.main_token, .keyword);
@@ -446,7 +446,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buf).?;
+ const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buf, node).?;
if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
try writeDocComments(builder, tree, docs);
@@ -523,7 +523,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.switch_case_inline_one,
.switch_case_inline,
=> {
- const switch_case = if (tag == .switch_case or tag == .switch_case_inline) tree.switchCase(node) else tree.switchCaseOne(node);
+ const switch_case = tree.fullSwitchCase(node).?;
try writeToken(builder, switch_case.inline_token, .keyword);
for (switch_case.ast.values) |item_node| try callWriteNodeTokens(allocator, .{ builder, item_node });
// check if it's 'else'
@@ -541,7 +541,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.for_simple,
.@"for",
=> {
- const while_node = ast.whileAst(tree, node).?;
+ const while_node = ast.fullWhile(tree, node).?;
try writeToken(builder, while_node.label_token, .label);
try writeToken(builder, while_node.inline_token, .keyword);
try writeToken(builder, while_node.ast.while_token, .keyword);
@@ -575,7 +575,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.@"if",
.if_simple,
=> {
- const if_node = ast.ifFull(tree, node);
+ const if_node = ast.fullIf(tree, node).?;
try writeToken(builder, if_node.ast.if_token, .keyword);
try callWriteNodeTokens(allocator, .{ builder, if_node.ast.cond_expr });
@@ -609,13 +609,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.array_init_dot_two_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
- const array_init: Ast.full.ArrayInit = switch (tag) {
- .array_init, .array_init_comma => tree.arrayInit(node),
- .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
- .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
- .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node),
- else => unreachable,
- };
+ const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buf, node).?;
try callWriteNodeTokens(allocator, .{ builder, array_init.ast.type_expr });
for (array_init.ast.elements) |elem| try callWriteNodeTokens(allocator, .{ builder, elem });
@@ -630,13 +624,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.struct_init_dot_two_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
- const struct_init: Ast.full.StructInit = switch (tag) {
- .struct_init, .struct_init_comma => tree.structInit(node),
- .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
- .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
- .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node),
- else => unreachable,
- };
+ const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?;
var field_token_type: ?TokenType = null;
@@ -674,7 +662,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.async_call_one_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
- const call = ast.callFull(tree, node, &params).?;
+ const call = tree.fullCall(&params, node).?;
try writeToken(builder, call.async_token, .keyword);
try callWriteNodeTokens(allocator, .{ builder, call.ast.fn_expr });
@@ -690,12 +678,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.slice_open,
.slice_sentinel,
=> {
- const slice: Ast.full.Slice = switch (tag) {
- .slice => tree.slice(node),
- .slice_open => tree.sliceOpen(node),
- .slice_sentinel => tree.sliceSentinel(node),
- else => unreachable,
- };
+ const slice: Ast.full.Slice = tree.fullSlice(node).?;
try callWriteNodeTokens(allocator, .{ builder, slice.ast.sliced });
try callWriteNodeTokens(allocator, .{ builder, slice.ast.start });
@@ -772,11 +755,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.asm_input,
.asm_simple,
=> {
- const asm_node: Ast.full.Asm = switch (tag) {
- .@"asm" => tree.asmFull(node),
- .asm_simple => tree.asmSimple(node),
- else => return, // TODO Inputs, outputs
- };
+ const asm_node: Ast.full.Asm = tree.fullAsm(node).?;
try writeToken(builder, main_token, .keyword);
try writeToken(builder, asm_node.volatile_token, .keyword);
@@ -920,7 +899,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
- const ptr_type = ast.ptrType(tree, node).?;
+ const ptr_type = ast.fullPtrType(tree, node).?;
if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and
main_token == main_tokens[ptr_type.ast.child_type])
@@ -955,10 +934,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.array_type,
.array_type_sentinel,
=> {
- const array_type: Ast.full.ArrayType = if (tag == .array_type)
- tree.arrayType(node)
- else
- tree.arrayTypeSentinel(node);
+ const array_type: Ast.full.ArrayType = tree.fullArrayType(node).?;
try callWriteNodeTokens(allocator, .{ builder, array_type.ast.elem_count });
try callWriteNodeTokens(allocator, .{ builder, array_type.ast.sentinel });
@@ -988,7 +964,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type: ?TokenType) !void {
const tree = builder.handle.tree;
- const container_field = ast.containerField(tree, node).?;
+ const container_field = tree.fullContainerField(node).?;
const base = tree.nodes.items(.main_token)[node];
const tokens = tree.tokens.items(.tag);
@@ -1033,8 +1009,7 @@ pub fn writeAllSemanticTokens(
var builder = Builder.init(arena, store, handle, encoding);
// traverse the ast from the root declarations
- var buf: [2]Ast.Node.Index = undefined;
- for (ast.declMembers(handle.tree, 0, &buf)) |child| {
+ for (handle.tree.rootDecls()) |child| {
writeNodeTokens(&builder, child) catch |err| switch (err) {
error.MovedBackwards => break,
else => |e| return e,
diff --git a/src/signature_help.zig b/src/signature_help.zig
index e47e72d..b14bf17 100644
--- a/src/signature_help.zig
+++ b/src/signature_help.zig
@@ -275,7 +275,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl
};
var buf: [1]Ast.Node.Index = undefined;
- if (ast.fnProto(type_handle.handle.tree, node, &buf)) |proto| {
+ if (type_handle.handle.tree.fullFnProto(&buf, node)) |proto| {
return try fnProtoToSignatureInfo(
document_store,
arena,
@@ -327,7 +327,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl
}
}
- if (ast.fnProto(res_handle.tree, node, &buf)) |proto| {
+ if (res_handle.tree.fullFnProto(&buf, node)) |proto| {
return try fnProtoToSignatureInfo(
document_store,
arena,
diff --git a/src/translate_c.zig b/src/translate_c.zig
index 6c9f864..cf3d2df 100644
--- a/src/translate_c.zig
+++ b/src/translate_c.zig
@@ -69,8 +69,8 @@ fn convertCIncludeInternal(
var writer = output.writer(allocator);
var buffer: [2]Ast.Node.Index = undefined;
- if (ast.isBlock(tree, node)) {
- for (ast.blockStatements(tree, node, &buffer).?) |statement| {
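+ // blockStatements returns null for non-block nodes, replacing the separate isBlock check.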
+ if (ast.blockStatements(tree, node, &buffer)) |statements| {
+ for (statements) |statement| {
try callConvertCIncludeInternal(stack_allocator, .{ allocator, stack_allocator, tree, statement, output });
}
} else if (ast.builtinCallParams(tree, node, &buffer)) |params| {
diff --git a/tests/lsp_features/definition.zig b/tests/lsp_features/definition.zig
index caed193..ea4e2b0 100644
--- a/tests/lsp_features/definition.zig
+++ b/tests/lsp_features/definition.zig
@@ -28,15 +28,10 @@ test "definition - cursor is at the end of an identifier" {
}
test "definition - cursor is at the start of an identifier" {
- testDefinition(
+ try testDefinition(
\\fn main() void { <>foo(); }
\\fn foo() void {}
- ) catch |err| switch (err) {
- error.UnresolvedDefinition => {
- // TODO: #891
- },
- else => return err,
- };
+ );
}
fn testDefinition(source: []const u8) !void {
diff --git a/tests/utility/position_context.zig b/tests/utility/position_context.zig
index 2735061..01f13d6 100644
--- a/tests/utility/position_context.zig
+++ b/tests/utility/position_context.zig
@@ -9,91 +9,307 @@ const allocator = std.testing.allocator;
test "position context - var access" {
try testContext(
- \\const this_var = identifier;
+ \\const a_var = identifier;
,
- .var_access,
- "id",
+ .empty,
+ null,
);
try testContext(
- \\const this_var = identifier;
+ \\const a_var = identifier;
+ ,
+ .var_access,
+ "i",
+ );
+ try testContext(
+ \\const a_var = identifier;
+ ,
+ .var_access,
+ "ident",
+ );
+ try testContext(
+ \\const a_var = identifier;
,
.var_access,
"identifier",
);
+ try testContext(
+ \\const a_var = identifier;
+ ,
+ .empty,
+ null,
+ );
+
+ try testContext(
+ \\ fn foo() !Str {
+ ,
+ .var_access,
+ "S",
+ );
+ try testContext(
+ \\ fn foo() !Str {
+ ,
+ .var_access,
+ "Str",
+ );
try testContext(
\\ fn foo() !Str {
,
.var_access,
"Str",
);
- // TODO fix failing test!
+ try testContext(
+ \\ fn foo() !Str {
+ ,
+ .var_access,
+ "Str",
+ );
+
+ // TODO fix failing tests
+ // try testContext(
+ // \\ fn foo() Err!void {
+ // ,
+ // .var_access,
+ // "E",
+ // );
+ // try testContext(
+ // \\ fn foo() Err!void {
+ // ,
+ // .var_access,
+ // "Err",
+ // );
// try testContext(
// \\ fn foo() Err!void {
// ,
// .var_access,
// "Err",
// );
+ // try testContext(
+ // \\ fn foo() Err!void {
+ // ,
+ // .var_access,
+ // "v",
+ // );
+
+ try testContext(
+ \\if (bar.field == foo) {
+ ,
+ .var_access,
+ "b",
+ );
+ try testContext(
+ \\if (bar.field == foo) {
+ ,
+ .var_access,
+ "bar",
+ );
+ try testContext(
+ \\if (bar.field == foo) {
+ ,
+ .var_access,
+ "bar",
+ );
+
+ try testContext(
+ \\if (bar[0].field == foo) {
+ ,
+ .var_access,
+ "bar",
+ );
}
test "position context - field access" {
try testContext(
- \\if (foo.field == foo) {
+ \\if (bar.field == foo) {
,
.field_access,
- "foo.",
+ "bar.f",
);
try testContext(
- \\if (foo.member.field == foo) {
+ \\if (bar.field == foo) {
,
.field_access,
- "foo.member.",
+ "bar.fiel",
);
try testContext(
- \\if (foo.*.?.field == foo) {
+ \\if (bar.field == foo) {
,
.field_access,
- "foo.*.?.",
+ "bar.field",
+ );
+
+ try testContext(
+ \\if (bar.member.field == foo) {
+ ,
+ .field_access,
+ "bar.member",
);
try testContext(
- \\if (foo[0].field == foo) {
+ \\if (bar.member.field == foo) {
,
.field_access,
- "foo[0].",
+ "bar.member.f",
);
try testContext(
- \\if (foo.@"field" == foo) {
+ \\if (bar.member.field == foo) {
,
.field_access,
- "foo.",
+ "bar.member.fiel",
+ );
+ try testContext(
+ \\if (bar.member.field == foo) {
+ ,
+ .field_access,
+ "bar.member.field",
+ );
+
+ try testContext(
+ \\if (bar.*.?.field == foo) {
+ ,
+ .field_access,
+ "bar.*.?",
+ );
+ try testContext(
+ \\if (bar.*.?.field == foo) {
+ ,
+ .field_access,
+ "bar.*.?.f",
+ );
+
+ try testContext(
+ \\if (bar[0].field == foo) {
+ ,
+ .field_access,
+ "bar[0].f",
+ );
+
+ try testContext(
+ \\if (bar.@"field" == foo) {
+ ,
+ .field_access,
+ "bar.@\"",
+ );
+ try testContext(
+ \\if (bar.@"field" == foo) {
+ ,
+ .field_access,
+ "bar.@\"fiel",
+ );
+ try testContext(
+ \\if (bar.@"field" == foo) {
+ ,
+ .field_access,
+ "bar.@\"field\"",
+ );
+
+ try testContext(
+ \\const arr = std.ArrayList(SomeStruct(a, b, c, d)).init(allocator);
+ ,
+ .field_access,
+ "std.ArrayList(SomeStruct(a, b, c, d)).i",
);
try testContext(
\\const arr = std.ArrayList(SomeStruct(a, b, c, d)).init(allocator);
,
.field_access,
- "std.ArrayList(SomeStruct(a, b, c, d)).in",
+ "std.ArrayList(SomeStruct(a, b, c, d)).ini",
);
try testContext(
- \\fn foo() !Foo.b {
+ \\const arr = std.ArrayList(SomeStruct(a, b, c, d)).init(allocator);
+ ,
+ .field_access,
+ "std.ArrayList(SomeStruct(a, b, c, d)).init",
+ );
+
+ try testContext(
+ \\fn foo() !Foo.bar {
,
.field_access,
"Foo.b",
);
- // TODO fix failing test!
+ try testContext(
+ \\fn foo() !Foo.bar {
+ ,
+ .field_access,
+ "Foo.bar",
+ );
+ try testContext(
+ \\fn foo() !Foo.bar {
+ ,
+ .field_access,
+ "Foo.bar",
+ );
+
+ // TODO fix failing tests
// try testContext(
- // \\fn foo() Foo.b!void {
+ // \\fn foo() Foo.bar!void {
// ,
// .field_access,
// "Foo.b",
// );
+ // try testContext(
+ // \\fn foo() Foo.bar!void {
+ // ,
+ // .field_access,
+ // "Foo.bar",
+ // );
+ // try testContext(
+ // \\fn foo() Foo.bar!void {
+ // ,
+ // .field_access,
+ // "Foo.bar",
+ // );
}
test "position context - builtin" {
+ try testContext(
+ \\var foo = @
+ ,
+ .empty,
+ null,
+ );
+ try testContext(
+ \\var foo = @intC(u32, 5);
+ ,
+ .builtin,
+ "@i",
+ );
+ try testContext(
+ \\var foo = @intC(u32, 5);
+ ,
+ .builtin,
+ "@i",
+ );
+ try testContext(
+ \\var foo = @intC(u32, 5);
+ ,
+ .builtin,
+ "@intC",
+ );
try testContext(
\\var foo = @intC(u32, 5);
,
.builtin,
"@intC",
);
+
+ try testContext(
+ \\fn foo() void { @setRuntime(false); };
+ ,
+ .builtin,
+ "@s",
+ );
+ try testContext(
+ \\fn foo() void { @setRuntime(false); };
+ ,
+ .builtin,
+ "@s",
+ );
+ try testContext(
+ \\fn foo() void { @setRuntime(false); };
+ ,
+ .builtin,
+ "@setR",
+ );
try testContext(
\\fn foo() void { @setRuntime(false); };
,
@@ -118,6 +334,12 @@ test "position context - comment" {
}
test "position context - import/embedfile string literal" {
+ try testContext(
+ \\const std = @import("st");
+ ,
+ .import_string_literal,
+ "\"st", // maybe report just "st"
+ );
try testContext(
\\const std = @import("st");
,
@@ -130,6 +352,12 @@ test "position context - import/embedfile string literal" {
.embedfile_string_literal,
"\"file.", // maybe report just "file."
);
+ try testContext(
+ \\const std = @embedFile("file.");
+ ,
+ .embedfile_string_literal,
+ "\"file", // maybe report just "file."
+ );
}
test "position context - string literal" {
@@ -137,29 +365,49 @@ test "position context - string literal" {
\\var foo = "hello world!";
,
.string_literal,
- "\"he", // maybe report just "he"
+ "\"hel", // maybe report just "he"
);
try testContext(
- \\var foo = \\hello;
+ \\var foo = \\hello;
,
.string_literal,
- "\\\\hello", // maybe report just "hello"
+ "\\\\hello", // maybe report just "hello;"
);
}
test "position context - global error set" {
+ // TODO why is this a .var_access instead of a .global_error_set?
+ // try testContext(
+ // \\fn foo() error!void {
+ // ,
+ // .global_error_set,
+ // null,
+ // );
+ try testContext(
+ \\fn foo() error!void {
+ ,
+ .global_error_set,
+ null,
+ );
try testContext(
\\fn foo() error!void {
,
.global_error_set,
null,
);
+ try testContext(
+ \\fn foo() error.!void {
+ ,
+ .global_error_set,
+ null,
+ );
try testContext(
\\fn foo() error.!void {
,
.global_error_set,
null,
);
+
// TODO this should probably also be .global_error_set
// try testContext(
// \\fn foo() error{}!void {
@@ -176,12 +424,30 @@ test "position context - global error set" {
}
test "position context - enum literal" {
+ try testContext(
+ \\var foo = .tag;
+ ,
+ .enum_literal,
+ null,
+ );
+ try testContext(
+ \\var foo = .tag;
+ ,
+ .enum_literal,
+ null,
+ );
try testContext(
\\var foo = .tag;
,
.enum_literal,
null,
);
+ try testContext(
+ \\var foo = .;
+ ,
+ .empty,
+ null,
+ );
try testContext(
\\var foo = .;
,
@@ -191,6 +457,24 @@ test "position context - enum literal" {
}
test "position context - label" {
+ try testContext(
+ \\var foo = blk: { break :blk null };
+ ,
+ .pre_label,
+ null,
+ );
+ try testContext(
+ \\var foo = blk: { break :blk null };
+ ,
+ .label,
+ null,
+ );
+ try testContext(
+ \\var foo = blk: { break :blk null };
+ ,
+ .label,
+ null,
+ );
try testContext(
\\var foo = blk: { break :blk null };
,
@@ -206,12 +490,6 @@ test "position context - empty" {
.empty,
null,
);
- try testContext(
- \\const foo = struct {};
- ,
- .empty,
- null,
- );
try testContext(
\\try foo(arg, slice[]);
,
@@ -237,7 +515,7 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma
const final_line = try std.mem.concat(allocator, u8, &.{ line[0..cursor_idx], line[cursor_idx + "<cursor>".len ..] });
defer allocator.free(final_line);
- const ctx = try analysis.getPositionContext(allocator, line, cursor_idx);
+ const ctx = try analysis.getPositionContext(allocator, final_line, cursor_idx, true);
if (std.meta.activeTag(ctx) != tag) {
std.debug.print("Expected tag `{s}`, got `{s}`\n", .{ @tagName(tag), @tagName(std.meta.activeTag(ctx)) });
@@ -253,7 +531,7 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma
const expected_range = maybe_range orelse {
std.debug.print("Expected null range, got `{s}`\n", .{
- line[actual_loc.start..actual_loc.end],
+ final_line[actual_loc.start..actual_loc.end],
});
return error.DifferentRange;
};
@@ -263,8 +541,8 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma
if (expected_range_start != actual_loc.start or expected_range_end != actual_loc.end) {
std.debug.print("Expected range `{s}` ({}..{}), got `{s}` ({}..{})\n", .{
- line[expected_range_start..expected_range_end], expected_range_start, expected_range_end,
- line[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end,
+ final_line[expected_range_start..expected_range_end], expected_range_start, expected_range_end,
+ final_line[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end,
});
return error.DifferentRange;
}