diff --git a/src/analysis.zig b/src/analysis.zig index 92849e3..debc328 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -808,8 +808,8 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl }, .field_access => { if (datas[node].rhs == 0) return null; - if (node >= tree.nodes.len - 1) return null; // #boundsCheck - const rhs_str = tree.tokenSlice(datas[node].rhs); + const rhs_str = ast.tokenSlice(tree, datas[node].rhs) catch return null; + // If we are accessing a pointer type, remove one pointerness level :) const left_type = try resolveFieldAccessLhsType( store, @@ -1342,7 +1342,7 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 { .fn_decl, => if (ast.fnProto(tree, node, &buf).?.name_token) |name| return tree.tokenSlice(name), - .field_access => return tree.tokenSlice(data[node].rhs), + .field_access => return ast.tokenSlice(tree, data[node].rhs) catch return null, .call, .call_comma, .async_call, diff --git a/src/ast.zig b/src/ast.zig index 86761e2..dcdda74 100644 --- a/src/ast.zig +++ b/src/ast.zig @@ -1215,3 +1215,26 @@ pub fn nextFnParam(it: *Ast.full.FnProto.Iterator) ?Ast.full.FnProto.Param { it.tok_flag = false; } } + +/// A modified version of tree.tokenSlice that returns an error.UnexpectedToken if the tokenizer encounters an unexpected token +// https://github.com/zigtools/zls/issues/381 +pub fn tokenSlice(tree: Ast, token_index: Ast.TokenIndex) ![]const u8 { + const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); + const token_tag = token_tags[token_index]; + + // Many tokens can be determined entirely by their tag. + if (token_tag.lexeme()) |lexeme| { + return lexeme; + } + + // For some tokens, re-tokenization is needed to find the end. 
+ var tokenizer: std.zig.Tokenizer = .{ + .buffer = tree.source, + .index = token_starts[token_index], + .pending_invalid_token = null, + }; + const token = tokenizer.next(); + if (token.tag != token_tag) return error.UnexpectedToken; // assert(token.tag == token_tag); + return tree.source[token.loc.start..token.loc.end]; +} diff --git a/src/offsets.zig b/src/offsets.zig index 6573e80..ee61866 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -182,7 +182,8 @@ pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc { }; const token = tokenizer.next(); - std.debug.assert(token.tag == tag); + // HACK, should return error.UnexpectedToken + if (token.tag != tag) return .{ .start = 0, .end = 0 }; //std.debug.assert(token.tag == tag); return .{ .start = token.loc.start, .end = token.loc.end }; } diff --git a/src/references.zig b/src/references.zig index 05a63bb..6378826 100644 --- a/src/references.zig +++ b/src/references.zig @@ -382,7 +382,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto .field_access => { try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); - const rhs_str = tree.tokenSlice(datas[node].rhs); + const rhs_str = ast.tokenSlice(tree, datas[node].rhs) catch return; var bound_type_params = analysis.BoundTypeParams{}; const left_type = try analysis.resolveFieldAccessLhsType( store, diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index d8af3d5..90a48ac 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -854,7 +854,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D .field_access => { const data = node_data[node]; if (data.rhs == 0) return; const rhs_str = ast.tokenSlice(tree, data.rhs) catch return; try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs });