Merge pull request #308 from InterplanetaryEngineer/master

Fix inclusion of toplevel doc comments, remove @async recursion in writeNodeTokens, add a few regression tests
Alexandros Naskos 2021-04-16 01:19:50 -07:00 committed by GitHub
commit 8f868dfec6
5 changed files with 903 additions and 849 deletions
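A note on the `writeNodeTokens` change: the function previously recursed into itself through `try await @asyncCall(child_frame, ...)`, allocating a frame-sized buffer from the arena's child allocator for every visited child node. The diff replaces that with an explicit work-list. A minimal sketch of the pattern, outside the zls types (`visitAll` and the `children` table are illustrative stand-ins, not zls code):

```zig
const std = @import("std");

// Recursion replaced by an explicit stack: push the root, then loop,
// popping one node and pushing its children. No per-node call frames
// (async or otherwise) are allocated, so deep trees cannot exhaust
// the call stack.
fn visitAll(allocator: *std.mem.Allocator, children: []const []const u32, root: u32) !void {
    var stack = std.ArrayList(u32).init(allocator);
    defer stack.deinit();
    try stack.append(root);

    while (stack.popOrNull()) |node| {
        if (node == 0) continue; // index 0 means "no node", as in the diff
        std.debug.print("visiting node {}\n", .{node});
        for (children[node]) |child| try stack.append(child);
    }
}
```

One trade-off: a LIFO stack pops the most recently pushed child first, so children are visited in reverse push order; a consumer that depends on source order has to account for that.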

View File

@@ -17,12 +17,7 @@ pub fn deinit() void {
     resolve_trail.deinit();
 }
 
-/// Gets a declaration's doc comments, caller must free memory when a value is returned
-/// Like:
-///```zig
-///var comments = getFunctionDocComments(allocator, tree, func);
-///defer if (comments) |comments_pointer| allocator.free(comments_pointer);
-///```
+/// Gets a declaration's doc comments. Caller owns returned memory.
 pub fn getDocComments(
     allocator: *std.mem.Allocator,
     tree: ast.Tree,
@@ -30,15 +25,32 @@ pub fn getDocComments(
     format: types.MarkupContent.Kind,
 ) !?[]const u8 {
     const base = tree.nodes.items(.main_token)[node];
+    const base_kind = tree.nodes.items(.tag)[node];
     const tokens = tree.tokens.items(.tag);
-    if (getDocCommentTokenIndex(tokens, base)) |doc_comment_index| {
-        return try collectDocComments(allocator, tree, doc_comment_index, format);
+    switch (base_kind) {
+        // As far as I know, this does not actually happen yet, but it
+        // may come in useful.
+        .root => return try collectDocComments(allocator, tree, 0, format, true),
+        .fn_proto,
+        .fn_proto_one,
+        .fn_proto_simple,
+        .fn_proto_multi,
+        .fn_decl,
+        .local_var_decl,
+        .global_var_decl,
+        .aligned_var_decl,
+        .simple_var_decl,
+        => {
+            if (getDocCommentTokenIndex(tokens, base)) |doc_comment_index|
+                return try collectDocComments(allocator, tree, doc_comment_index, format, false);
+        },
+        else => {},
     }
     return null;
 }
 
-/// Get a declaration's doc comment token index
+/// Get the first doc comment of a declaration.
 pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: ast.TokenIndex) ?ast.TokenIndex {
     var idx = base_token;
     if (idx == 0) return null;
@@ -50,9 +62,9 @@ pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: ast.Toke
     if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1;
 
     // Find first doc comment token
-    if (!(tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment))
+    if (!(tokens[idx] == .doc_comment))
         return null;
-    return while (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) {
+    return while (tokens[idx] == .doc_comment) {
         if (idx == 0) break 0;
         idx -= 1;
     } else idx + 1;
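For context on these doc-comment hunks: Zig has two doc-comment token kinds. `//!` lines tokenize as `.container_doc_comment` and document the enclosing container (for a file, the `.root` node), while `///` lines tokenize as `.doc_comment` and document the declaration that follows:

```zig
//! A container (toplevel) doc comment: tokenized as .container_doc_comment
//! and attached to the file's .root node.

/// A declaration doc comment: tokenized as .doc_comment, attached to `foo`.
pub fn foo() void {}
```

Previously the backward scan matched both kinds, so a declaration's doc lookup could apparently pick up an adjacent `//!` block; restricting the scan to `.doc_comment` (and routing `.root` through the new `container_doc` path) keeps the two separate.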
@@ -63,6 +75,7 @@ pub fn collectDocComments(
     tree: ast.Tree,
     doc_comments: ast.TokenIndex,
     format: types.MarkupContent.Kind,
+    container_doc: bool,
 ) ![]const u8 {
     var lines = std.ArrayList([]const u8).init(allocator);
     defer lines.deinit();
@@ -70,28 +83,27 @@ pub fn collectDocComments(
     var curr_line_tok = doc_comments;
     while (true) : (curr_line_tok += 1) {
-        switch (tokens[curr_line_tok]) {
-            .doc_comment, .container_doc_comment => {
-                try lines.append(std.mem.trim(u8, tree.tokenSlice(curr_line_tok)[3..], &std.ascii.spaces));
-            },
-            else => break,
-        }
+        const comm = tokens[curr_line_tok];
+        if ((container_doc and comm == .container_doc_comment) or (!container_doc and comm == .doc_comment)) {
+            try lines.append(std.mem.trim(u8, tree.tokenSlice(curr_line_tok)[3..], &std.ascii.spaces));
+        } else break;
     }
 
     return try std.mem.join(allocator, if (format == .Markdown) " \n" else "\n", lines.items);
 }
 
-/// Gets a function signature (keywords, name, return value)
+/// Gets a function's keyword, name, arguments and return value.
 pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 {
     const start = offsets.tokenLocation(tree, func.ast.fn_token);
-    // return type can be 0 when user wrote incorrect fn signature
-    // to ensure we don't break, just end the signature at end of fn token
-    if (func.ast.return_type == 0) return tree.source[start.start..start.end];
-    const end = offsets.tokenLocation(tree, lastToken(tree, func.ast.return_type)).end;
-    return tree.source[start.start..end];
+    const end = if (func.ast.return_type != 0)
+        offsets.tokenLocation(tree, lastToken(tree, func.ast.return_type))
+    else
+        start;
+    return tree.source[start.start..end.end];
 }
 
-/// Gets a function snippet insert text
+/// Creates snippet insert text for a function. Caller owns returned memory.
 pub fn getFunctionSnippet(
     allocator: *std.mem.Allocator,
     tree: ast.Tree,
@@ -197,7 +209,6 @@ pub fn hasSelfParam(
     return false;
 }
 
-/// Gets a function signature (keywords, name, return value)
 pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const u8 {
     const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
     const end = offsets.tokenLocation(tree, lastToken(tree, var_decl.ast.init_node)).end;
@@ -232,14 +243,19 @@ pub fn isGenericFunction(tree: ast.Tree, func: ast.full.FnProto) bool {
     }
     return false;
 }
 
 // STYLE
 
 pub fn isCamelCase(name: []const u8) bool {
-    return !std.ascii.isUpper(name[0]) and std.mem.indexOf(u8, name[0..(name.len - 1)], "_") == null;
+    return !std.ascii.isUpper(name[0]) and !isSnakeCase(name);
 }
 
 pub fn isPascalCase(name: []const u8) bool {
-    return std.ascii.isUpper(name[0]) and std.mem.indexOf(u8, name[0..(name.len - 1)], "_") == null;
+    return std.ascii.isUpper(name[0]) and !isSnakeCase(name);
+}
+
+pub fn isSnakeCase(name: []const u8) bool {
+    return std.mem.indexOf(u8, name, "_") != null;
 }
 
 // ANALYSIS ENGINE
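With the `isSnakeCase` helper, the two predicates read as "first letter case, plus no underscore anywhere". One nuance visible in the diff: the old checks searched `name[0..(name.len - 1)]`, excluding the final byte, so a single trailing underscore was tolerated; the helper scans the whole name. A standalone sketch of the resulting classification (the functions are copied out of context for illustration):

```zig
const std = @import("std");

fn isSnakeCase(name: []const u8) bool {
    return std.mem.indexOf(u8, name, "_") != null;
}

fn isCamelCase(name: []const u8) bool {
    return !std.ascii.isUpper(name[0]) and !isSnakeCase(name);
}

fn isPascalCase(name: []const u8) bool {
    return std.ascii.isUpper(name[0]) and !isSnakeCase(name);
}

pub fn main() void {
    std.debug.assert(isCamelCase("camelCase"));
    std.debug.assert(isPascalCase("PascalCase"));
    std.debug.assert(isSnakeCase("snake_case"));
    // The old slice-based check would have let "trailing_" pass as
    // camelCase; with the whole-name scan it counts as snake case.
    std.debug.assert(!isCamelCase("trailing_"));
}
```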
@@ -499,8 +515,7 @@ pub fn resolveReturnType(
             .type = .{ .data = .{ .error_union = child_type_node }, .is_type_val = false },
             .handle = child_type.handle,
         };
-    } else
-        return child_type.instanceTypeVal();
+    } else return child_type.instanceTypeVal();
 }
 
 /// Resolves the child type of an optional type
@@ -698,8 +713,6 @@ pub fn resolveTypeOfNodeInternal(
         if (std.meta.eql(i, node_handle))
             return null;
     }
-    // We use the backing allocator here because the ArrayList expects its
-    // allocated memory to persist while it is empty.
     try resolve_trail.append(node_handle);
     defer _ = resolve_trail.pop();
@@ -2271,8 +2284,6 @@ fn resolveUse(
     // it is self-referential and we cannot resolve it.
     if (std.mem.indexOfScalar([*]const u8, using_trail.items, symbol.ptr) != null)
         return null;
-    // We use the backing allocator here because the ArrayList expects its
-    // allocated memory to persist while it is empty.
     try using_trail.append(symbol.ptr);
     defer _ = using_trail.pop();
@@ -2636,7 +2647,7 @@ fn makeInnerScope(
             .insertText = name,
             .insertTextFormat = .PlainText,
             .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs|
-                .{ .kind = .Markdown, .value = docs }
+                types.MarkupContent{ .kind = .Markdown, .value = docs }
             else
                 null,
         }, {});
@@ -2927,15 +2938,12 @@ fn makeScopeInternal(
         std.debug.assert(token_tags[name_token] == .identifier);
 
         const name = tree.tokenSlice(name_token);
-        try scope.decls.putNoClobber(name, if (is_for)
-            .{
+        try scope.decls.putNoClobber(name, if (is_for) .{
             .array_payload = .{
                 .identifier = name_token,
                 .array_expr = while_node.ast.cond_expr,
             },
-        }
-        else
-            .{
+        } else .{
             .pointer_payload = .{
                 .name = name_token,
                 .condition = while_node.ast.cond_expr,

View File

@@ -230,6 +230,7 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han
         });
     }
 
+    // TODO: style warnings for types, values and declarations below root scope
     if (tree.errors.len == 0) {
         for (tree.rootDecls()) |decl_idx| {
             const decl = tree.nodes.items(.tag)[decl_idx];
@@ -643,7 +644,7 @@ fn hoverSymbol(
         },
         .param_decl => |param| def: {
             if (param.first_doc_comment) |doc_comments| {
-                doc_str = try analysis.collectDocComments(&arena.allocator, handle.tree, doc_comments, hover_kind);
+                doc_str = try analysis.collectDocComments(&arena.allocator, handle.tree, doc_comments, hover_kind, false);
             }
 
             const first_token = param.first_doc_comment orelse
@@ -959,7 +960,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
             const doc = if (param.first_doc_comment) |doc_comments|
                 types.MarkupContent{
                     .kind = doc_kind,
-                    .value = try analysis.collectDocComments(&context.arena.allocator, tree, doc_comments, doc_kind),
+                    .value = try analysis.collectDocComments(&context.arena.allocator, tree, doc_comments, doc_kind, false),
                 }
             else
                 null;

View File

@@ -321,89 +321,49 @@ fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHan
     }
 }
 
-fn writeContainerField(
-    builder: *Builder,
-    arena: *std.heap.ArenaAllocator,
-    store: *DocumentStore,
-    node: ast.Node.Index,
-    field_token_type: ?TokenType,
-    child_frame: anytype,
-) !void {
-    const tree = builder.handle.tree;
-    const container_field = containerField(tree, node).?;
-    const base = tree.nodes.items(.main_token)[node];
-    const tokens = tree.tokens.items(.tag);
-
-    if (analysis.getDocCommentTokenIndex(tokens, base)) |docs|
-        try writeDocComments(builder, tree, docs);
-
-    try writeToken(builder, container_field.comptime_token, .keyword);
-    if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.name_token, tok_type);
-
-    if (container_field.ast.type_expr != 0) {
-        if (container_field.ast.align_expr != 0) {
-            try writeToken(builder, tree.firstToken(container_field.ast.align_expr) - 2, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.align_expr });
-        }
-        try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.type_expr });
-    }
-
-    if (container_field.ast.value_expr != 0) block: {
-        const eq_tok: ast.TokenIndex = if (container_field.ast.type_expr != 0)
-            lastToken(tree, container_field.ast.type_expr) + 1
-        else if (container_field.ast.align_expr != 0)
-            lastToken(tree, container_field.ast.align_expr) + 1
-        else
-            break :block; // Check this, I believe it is correct.
-
-        try writeToken(builder, eq_tok, .operator);
-        try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.value_expr });
-    }
-}
-
 // TODO This is very slow and does a lot of extra work, improve in the future.
 fn writeNodeTokens(
     builder: *Builder,
     arena: *std.heap.ArenaAllocator,
     store: *DocumentStore,
     maybe_node: ?ast.Node.Index,
 ) error{OutOfMemory}!void {
-    if (maybe_node == null) return;
-    const node = maybe_node.?;
-    if (node == 0) return;
+    const start_node = maybe_node orelse return;
 
     const handle = builder.handle;
     const tree = handle.tree;
     const node_tags = tree.nodes.items(.tag);
     const token_tags = tree.tokens.items(.tag);
-    const datas = tree.nodes.items(.data);
+    const node_data = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
-    if (node > datas.len) return;
+    if (start_node > node_data.len) return;
+
+    var stack = std.ArrayList(ast.Node.Index).init(arena.child_allocator);
+    defer stack.deinit();
+    try stack.append(start_node);
+
+    while (stack.popOrNull()) |node| {
+        if (node == 0 or node > node_data.len) continue;
 
     const tag = node_tags[node];
     const main_token = main_tokens[node];
 
-    const FrameSize = @sizeOf(@Frame(writeNodeTokens));
-    var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize);
-    defer arena.child_allocator.free(child_frame);
-
     switch (tag) {
         .root => unreachable,
         .container_field,
         .container_field_align,
         .container_field_init,
-        => try writeContainerField(builder, arena, store, node, .field, child_frame),
+        => try writeContainerField(builder, arena, store, node, .field, &stack),
         .@"errdefer" => {
             try writeToken(builder, main_token, .keyword);
 
-            if (datas[node].lhs != 0) {
-                const payload_tok = datas[node].lhs;
+            if (node_data[node].lhs != 0) {
+                const payload_tok = node_data[node].lhs;
                 try writeToken(builder, payload_tok - 1, .operator);
                 try writeToken(builder, payload_tok, .variable);
                 try writeToken(builder, payload_tok + 1, .operator);
             }
 
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+            try stack.append(node_data[node].rhs);
         },
         .block,
         .block_semicolon,
@@ -417,12 +377,12 @@ fn writeNodeTokens(
             var gap_highlighter = GapHighlighter.init(builder, first_tok);
             const statements: []const ast.Node.Index = switch (tag) {
-                .block, .block_semicolon => tree.extra_data[datas[node].lhs..datas[node].rhs],
+                .block, .block_semicolon => tree.extra_data[node_data[node].lhs..node_data[node].rhs],
                 .block_two, .block_two_semicolon => blk: {
-                    const statements = &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
-                    const len: usize = if (datas[node].lhs == 0)
+                    const statements = &[_]ast.Node.Index{ node_data[node].lhs, node_data[node].rhs };
+                    const len: usize = if (node_data[node].lhs == 0)
                         @as(usize, 0)
-                    else if (datas[node].rhs == 0)
+                    else if (node_data[node].rhs == 0)
                         @as(usize, 1)
                     else
                         @as(usize, 2);
@@ -434,9 +394,9 @@ fn writeNodeTokens(
             for (statements) |child| {
                 try gap_highlighter.next(child);
                 if (node_tags[child].isContainerField()) {
-                    try writeContainerField(builder, arena, store, child, .field, child_frame);
+                    try writeContainerField(builder, arena, store, child, .field, &stack);
                 } else {
-                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
+                    try stack.append(child);
                 }
             }
@@ -449,7 +409,7 @@ fn writeNodeTokens(
         => {
             const var_decl = varDecl(tree, node).?;
             if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
-                try writeDocComments(builder, handle.tree, comment_idx);
+                try writeDocComments(builder, tree, comment_idx);
 
             try writeToken(builder, var_decl.visib_token, .keyword);
             try writeToken(builder, var_decl.extern_export_token, .keyword);
@@ -464,22 +424,22 @@ fn writeNodeTokens(
             }
 
             if (var_decl.ast.type_node != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.type_node });
+                try stack.append(var_decl.ast.type_node);
             if (var_decl.ast.align_node != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.align_node });
+                try stack.append(var_decl.ast.align_node);
             if (var_decl.ast.section_node != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.section_node });
+                try stack.append(var_decl.ast.section_node);
 
             try writeToken(builder, var_decl.ast.mut_token + 2, .operator);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.init_node });
+            try stack.append(var_decl.ast.init_node);
         },
         .@"usingnamespace" => {
             const first_tok = tree.firstToken(node);
             if (first_tok > 0 and token_tags[first_tok - 1] == .doc_comment)
-                try writeDocComments(builder, builder.handle.tree, first_tok - 1);
+                try writeDocComments(builder, tree, first_tok - 1);
             try writeToken(builder, if (token_tags[first_tok] == .keyword_pub) first_tok else null, .keyword);
             try writeToken(builder, main_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
         },
         .container_decl,
         .container_decl_trailing,
@@ -509,31 +469,31 @@ fn writeNodeTokens(
             try writeToken(builder, decl.ast.main_token, .keyword);
             if (decl.ast.enum_token) |enum_token| {
                 if (decl.ast.arg != 0)
-                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg })
+                    try stack.append(decl.ast.arg)
                 else
                     try writeToken(builder, enum_token, .keyword);
-            } else if (decl.ast.arg != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg });
+            } else if (decl.ast.arg != 0) try stack.append(decl.ast.arg);
 
             var gap_highlighter = GapHighlighter.init(builder, main_token + 1);
             const field_token_type = fieldTokenType(node, handle);
             for (decl.ast.members) |child| {
                 try gap_highlighter.next(child);
                 if (node_tags[child].isContainerField()) {
-                    try writeContainerField(builder, arena, store, child, field_token_type, child_frame);
+                    try writeContainerField(builder, arena, store, child, field_token_type, &stack);
                 } else {
-                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
+                    try stack.append(child);
                 }
             }
             try gap_highlighter.end(lastToken(tree, node));
         },
         .error_value => {
-            if (datas[node].lhs != 0) {
-                try writeToken(builder, datas[node].lhs - 1, .keyword);
+            if (node_data[node].lhs != 0) {
+                try writeToken(builder, node_data[node].lhs - 1, .keyword);
             }
-            try writeToken(builder, datas[node].rhs, .errorTag);
+            try writeToken(builder, node_data[node].rhs, .errorTag);
         },
         .identifier => {
-            if (analysis.isTypeIdent(handle.tree, main_token)) {
+            if (analysis.isTypeIdent(tree, main_token)) {
                 return try writeToken(builder, main_token, .type);
             }
@@ -541,8 +501,8 @@ fn writeNodeTokens(
                 store,
                 arena,
                 handle,
-                handle.tree.getNodeSource(node),
-                handle.tree.tokens.items(.start)[main_token],
+                tree.getNodeSource(node),
+                tree.tokens.items(.start)[main_token],
             )) |child| {
                 if (child.decl.* == .param_decl) {
                     return try writeToken(builder, main_token, .parameter);
@@ -564,19 +524,19 @@ fn writeNodeTokens(
             var buf: [1]ast.Node.Index = undefined;
             const fn_proto: ast.full.FnProto = fnProto(tree, node, &buf).?;
             if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
-                try writeDocComments(builder, handle.tree, docs);
+                try writeDocComments(builder, tree, docs);
 
             try writeToken(builder, fn_proto.visib_token, .keyword);
             try writeToken(builder, fn_proto.extern_export_token, .keyword);
             try writeToken(builder, fn_proto.lib_name, .string);
             try writeToken(builder, fn_proto.ast.fn_token, .keyword);
 
-            const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto))
+            const func_name_tok_type: TokenType = if (analysis.isTypeFunction(tree, fn_proto))
                 .type
             else
                 .function;
 
-            const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto))
+            const tok_mod = if (analysis.isGenericFunction(tree, fn_proto))
                 TokenModifiers{ .generic = true }
             else
                 TokenModifiers{};
@@ -585,59 +545,59 @@ fn writeNodeTokens(
             var it = fn_proto.iterate(tree);
             while (it.next()) |param_decl| {
-                if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, handle.tree, docs);
+                if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, tree, docs);
 
                 try writeToken(builder, param_decl.comptime_noalias, .keyword);
                 try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true });
                 if (param_decl.anytype_ellipsis3) |any_token| {
                     try writeToken(builder, any_token, .type);
-                } else if (param_decl.type_expr != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param_decl.type_expr });
+                } else if (param_decl.type_expr != 0) try stack.append(param_decl.type_expr);
             }
 
             if (fn_proto.ast.align_expr != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.align_expr });
+                try stack.append(fn_proto.ast.align_expr);
             if (fn_proto.ast.section_expr != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.section_expr });
+                try stack.append(fn_proto.ast.section_expr);
             if (fn_proto.ast.callconv_expr != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.callconv_expr });
+                try stack.append(fn_proto.ast.callconv_expr);
             if (fn_proto.ast.return_type != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.return_type });
+                try stack.append(fn_proto.ast.return_type);
 
             if (tag == .fn_decl)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+                try stack.append(node_data[node].rhs);
         },
         .anyframe_type => {
             try writeToken(builder, main_token, .type);
-            if (datas[node].rhs != 0) {
-                try writeToken(builder, datas[node].lhs, .type);
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+            if (node_data[node].rhs != 0) {
+                try writeToken(builder, node_data[node].lhs, .type);
+                try stack.append(node_data[node].rhs);
             }
         },
         .@"defer" => {
             try writeToken(builder, main_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+            try stack.append(node_data[node].rhs);
         },
         .@"comptime",
         .@"nosuspend",
         => {
             if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
-                try writeDocComments(builder, handle.tree, doc);
+                try writeDocComments(builder, tree, doc);
             try writeToken(builder, main_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
         },
         .@"switch",
         .switch_comma,
         => {
             try writeToken(builder, main_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
-            const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange);
+            try stack.append(node_data[node].lhs);
+            const extra = tree.extraData(node_data[node].rhs, ast.Node.SubRange);
             const cases = tree.extra_data[extra.start..extra.end];
 
-            var gap_highlighter = GapHighlighter.init(builder, lastToken(tree, datas[node].lhs) + 1);
+            var gap_highlighter = GapHighlighter.init(builder, lastToken(tree, node_data[node].lhs) + 1);
             for (cases) |case_node| {
                 try gap_highlighter.next(case_node);
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node });
+                try stack.append(case_node);
             }
             try gap_highlighter.end(lastToken(tree, node));
         },
@@ -645,7 +605,7 @@ fn writeNodeTokens(
         .switch_case,
         => {
             const switch_case = if (tag == .switch_case) tree.switchCase(node) else tree.switchCaseOne(node);
-            for (switch_case.ast.values) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node });
+            for (switch_case.ast.values) |item_node| try stack.append(item_node);
             // check it it's 'else'
             if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword);
             try writeToken(builder, switch_case.ast.arrow_token, .operator);
@@ -653,7 +613,7 @@ fn writeNodeTokens(
                 const p_token = @boolToInt(token_tags[payload_token] == .asterisk);
                 try writeToken(builder, p_token, .variable);
             }
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.ast.target_expr });
+            try stack.append(switch_case.ast.target_expr);
         },
         .@"while",
         .while_simple,
@@ -665,7 +625,7 @@ fn writeNodeTokens(
             try writeToken(builder, while_node.label_token, .label);
             try writeToken(builder, while_node.inline_token, .keyword);
             try writeToken(builder, while_node.ast.while_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cond_expr });
+            try stack.append(while_node.ast.cond_expr);
             if (while_node.payload_token) |payload| {
                 try writeToken(builder, payload - 1, .operator);
                 try writeToken(builder, payload, .variable);
@@ -678,9 +638,9 @@ fn writeNodeTokens(
                 try writeToken(builder, r_pipe, .operator);
             }
             if (while_node.ast.cont_expr != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cont_expr });
+                try stack.append(while_node.ast.cont_expr);
 
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.then_expr });
+            try stack.append(while_node.ast.then_expr);
 
             if (while_node.ast.else_expr != 0) {
                 try writeToken(builder, while_node.else_token, .keyword);
@@ -690,7 +650,7 @@ fn writeNodeTokens(
                     try writeToken(builder, err_token, .variable);
                     try writeToken(builder, err_token + 1, .operator);
                 }
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.else_expr });
+                try stack.append(while_node.ast.else_expr);
             }
         },
         .@"if",
@@ -699,7 +659,7 @@ fn writeNodeTokens(
             const if_node = ifFull(tree, node);
 
             try writeToken(builder, if_node.ast.if_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr });
+            try stack.append(if_node.ast.cond_expr);
 
             if (if_node.payload_token) |payload| {
                 // if (?x) |x|
@@ -707,7 +667,7 @@ fn writeNodeTokens(
                 try writeToken(builder, payload, .variable); // x
                 try writeToken(builder, payload + 1, .operator); // |
             }
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.then_expr });
+            try stack.append(if_node.ast.then_expr);
 
             if (if_node.ast.else_expr != 0) {
                 try writeToken(builder, if_node.else_token, .keyword);
@@ -717,7 +677,7 @@ fn writeNodeTokens(
                     try writeToken(builder, err_token, .variable); // err
                     try writeToken(builder, err_token + 1, .operator); // |
                 }
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.else_expr });
+                try stack.append(if_node.ast.else_expr);
             }
         },
         .array_init,
@@ -739,8 +699,8 @@ fn writeNodeTokens(
             };
 
             if (array_init.ast.type_expr != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_init.ast.type_expr });
-            for (array_init.ast.elements) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem });
+                try stack.append(array_init.ast.type_expr);
+            for (array_init.ast.elements) |elem| try stack.append(elem);
         },
         .struct_init,
         .struct_init_comma,
@@ -763,7 +723,7 @@ fn writeNodeTokens(
             var field_token_type: ?TokenType = null;
 
             if (struct_init.ast.type_expr != 0) {
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_init.ast.type_expr });
+                try stack.append(struct_init.ast.type_expr);
 
                 field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{
                     .node = struct_init.ast.type_expr,
@@ -785,7 +745,7 @@ fn writeNodeTokens(
                 try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.'
                 try writeToken(builder, init_token - 2, field_token_type orelse .field); // name
                 try writeToken(builder, init_token - 1, .operator); // '='
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init });
+                try stack.append(field_init);
             }
             try gap_highlighter.end(lastToken(tree, node));
         },
@@ -806,14 +766,14 @@ fn writeNodeTokens(
             };
 
             try writeToken(builder, call.async_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr });
+            try stack.append(call.ast.fn_expr);
 
             if (builder.current_token) |curr_tok| {
                 if (curr_tok != lastToken(tree, call.ast.fn_expr) and token_tags[lastToken(tree, call.ast.fn_expr)] == .identifier) {
                     try writeToken(builder, lastToken(tree, call.ast.fn_expr), .function);
                 }
             }
-            for (call.ast.params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
+            for (call.ast.params) |param| try stack.append(param);
         },
         .slice,
         .slice_open,
@@ -826,43 +786,43 @@ fn writeNodeTokens(
                 else => unreachable,
             };
 
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sliced });
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.start });
+            try stack.append(slice.ast.sliced);
+            try stack.append(slice.ast.start);
             try writeToken(builder, lastToken(tree, slice.ast.start) + 1, .operator);
 
             if (slice.ast.end != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.end });
+                try stack.append(slice.ast.end);
 
             if (slice.ast.sentinel != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sentinel });
+                try stack.append(slice.ast.sentinel);
         },
         .array_access => {
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+            try stack.append(node_data[node].lhs);
+            try stack.append(node_data[node].rhs);
         },
         .deref => {
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
             try writeToken(builder, main_token, .operator);
         },
        .unwrap_optional => {
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
             try writeToken(builder, main_token + 1, .operator);
         },
         .grouped_expression => {
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
         },
         .@"break",
         .@"continue",
         => {
             try writeToken(builder, main_token, .keyword);
-            if (datas[node].lhs != 0)
-                try writeToken(builder, datas[node].lhs, .label);
-            if (datas[node].rhs != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+            if (node_data[node].lhs != 0)
+                try writeToken(builder, node_data[node].lhs, .label);
+            if (node_data[node].rhs != 0)
+                try stack.append(node_data[node].rhs);
         },
         .@"suspend", .@"return" => {
             try writeToken(builder, main_token, .keyword);
-            if (datas[node].lhs != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            if (node_data[node].lhs != 0)
+                try stack.append(node_data[node].lhs);
         },
         .integer_literal,
         .float_literal,
@@ -878,7 +838,7 @@ fn writeNodeTokens(
         .builtin_call_two,
         .builtin_call_two_comma,
         => {
-            const data = datas[node];
+            const data = node_data[node];
             const params = switch (tag) {
                 .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
                 .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
@@ -892,7 +852,7 @@ fn writeNodeTokens(
             try writeToken(builder, main_token, .builtin);
             for (params) |param|
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
+                try stack.append(param);
         },
         .string_literal,
         .char_literal,
@@ -901,7 +861,7 @@ fn writeNodeTokens(
         },
         .multiline_string_literal => {
             var cur_tok = main_token;
-            const last_tok = datas[node].rhs;
+            const last_tok = node_data[node].rhs;
             while (cur_tok <= last_tok) : (cur_tok += 1) try writeToken(builder, cur_tok, .string);
         },
@@ -929,7 +889,7 @@ fn writeNodeTokens(
             try writeToken(builder, main_token, .keyword);
             try writeToken(builder, asm_node.volatile_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_node.ast.template });
+            try stack.append(asm_node.ast.template);
             // TODO Inputs, outputs.
         },
         .@"anytype" => {
@@ -937,20 +897,20 @@ fn writeNodeTokens(
         },
         .test_decl => {
             if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
-                try writeDocComments(builder, handle.tree, doc);
+                try writeDocComments(builder, tree, doc);
 
             try writeToken(builder, main_token, .keyword);
             if (token_tags[main_token + 1] == .string_literal)
                 try writeToken(builder, main_token + 1, .string);
 
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+            try stack.append(node_data[node].rhs);
         },
         .@"catch" => {
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
             try writeToken(builder, main_token, .keyword);
             if (token_tags[main_token + 1] == .pipe)
                 try writeToken(builder, main_token + 1, .variable);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+            try stack.append(node_data[node].rhs);
         },
         .add,
         .add_wrap,
@@ -994,22 +954,22 @@ fn writeNodeTokens(
         .sub_wrap,
         .@"orelse",
         => {
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
             const token_type: TokenType = switch (tag) {
                 .bool_and, .bool_or => .keyword,
                 else => .operator,
             };
 
             try writeToken(builder, main_token, token_type);
-            if (datas[node].rhs != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+            if (node_data[node].rhs != 0)
+                try stack.append(node_data[node].rhs);
         },
         .field_access => {
-            const data = datas[node];
+            const data = node_data[node];
             if (data.rhs == 0) return;
             const rhs_str = tree.tokenSlice(data.rhs);
 
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs });
+            try stack.append(data.lhs);
             // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
             // writeToken code.
@@ -1066,22 +1026,12 @@ fn writeNodeTokens(
             if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and
                 main_token == main_tokens[ptr_type.ast.child_type])
             {
-                return try await @asyncCall(child_frame, {}, writeNodeTokens, .{
-                    builder,
-                    arena,
-                    store,
-                    ptr_type.ast.child_type,
-                });
+                return try stack.append(ptr_type.ast.child_type);
             }
 
             if (ptr_type.size == .One) try writeToken(builder, main_token, .operator);
             if (ptr_type.ast.sentinel != 0) {
-                return try await @asyncCall(child_frame, {}, writeNodeTokens, .{
-                    builder,
-                    arena,
-                    store,
-                    ptr_type.ast.sentinel,
-                });
+                return try stack.append(ptr_type.ast.sentinel);
             }
 
             try writeToken(builder, ptr_type.allowzero_token, .keyword);
@@ -1089,19 +1039,19 @@ fn writeNodeTokens(
             if (ptr_type.ast.align_node != 0) {
                 const first_tok = tree.firstToken(ptr_type.ast.align_node);
                 try writeToken(builder, first_tok - 2, .keyword);
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.align_node });
+                try stack.append(ptr_type.ast.align_node);
 
                 if (ptr_type.ast.bit_range_start != 0) {
-                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_start });
+                    try stack.append(ptr_type.ast.bit_range_start);
                     try writeToken(builder, tree.firstToken(ptr_type.ast.bit_range_end - 1), .operator);
-                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_end });
+                    try stack.append(ptr_type.ast.bit_range_end);
                 }
             }
 
             try writeToken(builder, ptr_type.const_token, .keyword);
             try writeToken(builder, ptr_type.volatile_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.child_type });
+            try stack.append(ptr_type.ast.child_type);
         },
         .array_type,
         .array_type_sentinel,
@@ -1111,11 +1061,11 @@ fn writeNodeTokens(
             else
                 tree.arrayTypeSentinel(node);
 
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_count });
+            try stack.append(array_type.ast.elem_count);
             if (array_type.ast.sentinel != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.sentinel });
+                try stack.append(array_type.ast.sentinel);
 
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_type });
+            try stack.append(array_type.ast.elem_type);
         },
         .address_of,
         .bit_not,
@@ -1125,18 +1075,59 @@ fn writeNodeTokens(
         .negation_wrap,
         => {
             try writeToken(builder, main_token, .operator);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
         },
         .@"try",
         .@"resume",
         .@"await",
         => {
             try writeToken(builder, main_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+            try stack.append(node_data[node].lhs);
         },
         .anyframe_literal => try writeToken(builder, main_token, .keyword),
     }
 }
+}
+
+fn writeContainerField(
+    builder: *Builder,
+    arena: *std.heap.ArenaAllocator,
+    store: *DocumentStore,
+    node: ast.Node.Index,
+    field_token_type: ?TokenType,
+    stack: *std.ArrayList(ast.Node.Index),
+) !void {
+    const tree = builder.handle.tree;
+    const container_field = containerField(tree, node).?;
+    const base = tree.nodes.items(.main_token)[node];
+    const tokens = tree.tokens.items(.tag);
+
+    if (analysis.getDocCommentTokenIndex(tokens, base)) |docs|
+        try writeDocComments(builder, tree, docs);
+
+    try writeToken(builder, container_field.comptime_token, .keyword);
+    if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.name_token, tok_type);
+
+    if (container_field.ast.type_expr != 0) {
+        if (container_field.ast.align_expr != 0) {
+            try writeToken(builder, tree.firstToken(container_field.ast.align_expr) - 2, .keyword);
+            try stack.append(container_field.ast.align_expr);
+        }
+        try stack.append(container_field.ast.type_expr);
+    }
+
+    if (container_field.ast.value_expr != 0) block: {
+        const eq_tok: ast.TokenIndex = if (container_field.ast.type_expr != 0)
+            lastToken(tree, container_field.ast.type_expr) + 1
+        else if (container_field.ast.align_expr != 0)
+            lastToken(tree, container_field.ast.align_expr) + 1
+        else
+            break :block; // Check this, I believe it is correct.
+
+        try writeToken(builder, eq_tok, .operator);
+        try stack.append(container_field.ast.value_expr);
+    }
+}
 
 // TODO Range version, edit version.
 pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {

View File

@@ -23,15 +23,7 @@ fn fnProtoToSignatureInfo(
     const token_starts = tree.tokens.items(.start);
     const alloc = &arena.allocator;
     const label = analysis.getFunctionSignature(tree, proto);
-    const proto_comments = types.MarkupContent{ .value = if (try analysis.getDocComments(
-        alloc,
-        tree,
-        fn_node,
-        .Markdown,
-    )) |dc|
-        dc
-    else
-        "" };
+    const proto_comments = (try analysis.getDocComments(alloc, tree, fn_node, .Markdown)) orelse "";
 
     const arg_idx = if (skip_self_param) blk: {
         const has_self_param = try analysis.hasSelfParam(arena, document_store, handle, proto);
@@ -42,14 +34,9 @@ fn fnProtoToSignatureInfo(
     var param_it = proto.iterate(tree);
     while (param_it.next()) |param| {
         const param_comments = if (param.first_doc_comment) |dc|
-            types.MarkupContent{ .value = try analysis.collectDocComments(
-                alloc,
-                tree,
-                dc,
-                .Markdown,
-            ) }
+            try analysis.collectDocComments(alloc, tree, dc, .Markdown, false)
         else
-            null;
+            "";
 
         var param_label_start: usize = 0;
         var param_label_end: usize = 0;
@@ -77,12 +64,12 @@ fn fnProtoToSignatureInfo(
         const param_label = tree.source[param_label_start..param_label_end];
         try params.append(alloc, .{
             .label = param_label,
-            .documentation = param_comments,
+            .documentation = types.MarkupContent{ .value = param_comments },
         });
     }
     return types.SignatureInformation{
         .label = label,
-        .documentation = proto_comments,
+        .documentation = types.MarkupContent{ .value = proto_comments },
         .parameters = params.items,
         .activeParameter = arg_idx,
     };

View File

@@ -189,9 +189,76 @@ test "Request completion with no trailing whitespace" {
     );
 }
 
+test "Encoded space in file name and usingnamespace on non-existing symbol" {
+    var server = try Server.start(initialize_msg, null);
+    defer server.shutdown();
+    try server.request("textDocument/didOpen",
+        \\{"textDocument":{"uri":"file:///%20test.zig","languageId":"zig","version":420,"text":"usingnamespace a.b;\nb."}}
+    , null);
+    try server.request("textDocument/completion",
+        \\{"textDocument":{"uri":"file:///%20test.zig"}, "position":{"line":1,"character":2}}
+    ,
+        \\{"isIncomplete":false,"items":[]}
+    );
+}
+
+test "Self-referential definition" {
+    var server = try Server.start(initialize_msg, null);
+    defer server.shutdown();
+    try server.request("textDocument/didOpen",
+        \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const h = h(0);\nc"}}
+    , null);
+    try server.request("textDocument/completion",
+        \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+    ,
+        \\{"isIncomplete":false,"items":[{"label":"h","kind":21,"textEdit":null,"filterText":null,"insertText":"h","insertTextFormat":1,"detail":"const h = h(0)","documentation":null}]}
+    );
+}
+
+test "Missing return type" {
+    var server = try Server.start(initialize_msg, null);
+    defer server.shutdown();
+    try server.request("textDocument/didOpen",
+        \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"fn w() {}\nc"}}
+    , null);
+    try server.request("textDocument/completion",
+        \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+    ,
+        \\{"isIncomplete":false,"items":[{"label":"w","kind":3,"textEdit":null,"filterText":null,"insertText":"w","insertTextFormat":1,"detail":"fn","documentation":null}]}
+    );
+}
+
+test "Pointer and optional deref" {
+    var server = try Server.start(initialize_msg, null);
+    defer server.shutdown();
+    try server.request("textDocument/didOpen",
+        \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"var value: ?struct { data: i32 = 5 } = null;const ptr = &value;\nconst a = ptr.*.?."}}
+    , null);
+    try server.request("textDocument/completion",
+        \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":18}}
+    ,
+        \\{"isIncomplete":false,"items":[{"label":"data","kind":5,"textEdit":null,"filterText":null,"insertText":"data","insertTextFormat":1,"detail":"data: i32 = 5","documentation":null}]}
+    );
+}
+
 test "Request utf-8 offset encoding" {
     var server = try Server.start(initialize_msg_offs,
         \\{"offsetEncoding":"utf-8","capabilities":{"signatureHelpProvider":{"triggerCharacters":["("],"retriggerCharacters":[","]},"textDocumentSync":1,"renameProvider":true,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"hoverProvider":true,"codeActionProvider":false,"declarationProvider":true,"definitionProvider":true,"typeDefinitionProvider":true,"implementationProvider":false,"referencesProvider":true,"documentSymbolProvider":true,"colorProvider":false,"documentFormattingProvider":true,"documentRangeFormattingProvider":false,"foldingRangeProvider":false,"selectionRangeProvider":false,"workspaceSymbolProvider":false,"rangeProvider":false,"documentProvider":true,"workspace":{"workspaceFolders":{"supported":false,"changeNotifications":false}},"semanticTokensProvider":{"full":true,"range":false,"legend":{"tokenTypes":["type","parameter","variable","enumMember","field","errorTag","function","keyword","comment","string","number","operator","builtin","label","keywordLiteral"],"tokenModifiers":["namespace","struct","enum","union","opaque","declaration","async","documentation","generic"]}}},"serverInfo":{"name":"zls","version":"0.1.0"}}
     );
     server.shutdown();
 }
+
+// not fixed yet!
+// test "Self-referential import" {
+//     var server = try Server.start(initialize_msg, null);
+//     defer server.shutdown();
+//     try server.request("textDocument/didOpen",
+//         \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const a = @import(\"test.zig\").a;\nc"}}
+//     , null);
+//     try server.request("textDocument/completion",
+//         \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+//     ,
+//         \\{"isIncomplete":false,"items":[]}
+//     );
+// }