Merge pull request #308 from InterplanetaryEngineer/master
Fix inclusion of toplevel doc comments, remove @async recursion in writeNodeTokens, add a few regression tests
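For context on the first fix: in Zig, `///` introduces a doc comment bound to the declaration that follows it, while `//!` introduces a container doc comment bound to the enclosing file or container. Before this change both token kinds were matched by the same backward scan, so top-level `//!` comments could be picked up as if they documented a nearby declaration. A minimal illustration (not part of the commit):

```zig
//! A container doc comment: documents this file (the root container).

/// A declaration doc comment: documents only `add` below.
pub fn add(a: i32, b: i32) i32 {
    return a + b;
}
```

The diff below routes `.root` nodes through `collectDocComments` with a new `container_doc` flag so each comment kind is only collected for the node it belongs to.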
commit 8f868dfec6
@@ -17,12 +17,7 @@ pub fn deinit() void {
 resolve_trail.deinit();
 }

-/// Gets a declaration's doc comments, caller must free memory when a value is returned
-/// Like:
-///```zig
-///var comments = getFunctionDocComments(allocator, tree, func);
-///defer if (comments) |comments_pointer| allocator.free(comments_pointer);
-///```
+/// Gets a declaration's doc comments. Caller owns returned memory.
 pub fn getDocComments(
 allocator: *std.mem.Allocator,
 tree: ast.Tree,
@@ -30,15 +25,32 @@ pub fn getDocComments(
 format: types.MarkupContent.Kind,
 ) !?[]const u8 {
 const base = tree.nodes.items(.main_token)[node];
+const base_kind = tree.nodes.items(.tag)[node];
 const tokens = tree.tokens.items(.tag);

-if (getDocCommentTokenIndex(tokens, base)) |doc_comment_index| {
-return try collectDocComments(allocator, tree, doc_comment_index, format);
+switch (base_kind) {
+// As far as I know, this does not actually happen yet, but it
+// may come in useful.
+.root => return try collectDocComments(allocator, tree, 0, format, true),
+.fn_proto,
+.fn_proto_one,
+.fn_proto_simple,
+.fn_proto_multi,
+.fn_decl,
+.local_var_decl,
+.global_var_decl,
+.aligned_var_decl,
+.simple_var_decl,
+=> {
+if (getDocCommentTokenIndex(tokens, base)) |doc_comment_index|
+return try collectDocComments(allocator, tree, doc_comment_index, format, false);
+},
+else => {},
 }
 return null;
 }

-/// Get a declaration's doc comment token index
+/// Get the first doc comment of a declaration.
 pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: ast.TokenIndex) ?ast.TokenIndex {
 var idx = base_token;
 if (idx == 0) return null;
@@ -50,9 +62,9 @@ pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: ast.Toke
 if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1;

 // Find first doc comment token
-if (!(tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment))
+if (!(tokens[idx] == .doc_comment))
 return null;
-return while (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) {
+return while (tokens[idx] == .doc_comment) {
 if (idx == 0) break 0;
 idx -= 1;
 } else idx + 1;
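A worked example of the scan above (hypothetical token stream; the unshown lines just before this hunk step back over the `fn`/`extern` keywords the same way the `pub` check does):

```zig
// tokens: [0] .doc_comment  "/// Adds one."
//         [1] .doc_comment  "/// Second comment line."
//         [2] .keyword_pub
//         [3] .keyword_fn   <- base_token passed in
//
// The index walks 3 -> 2 -> 1, sees a .doc_comment, then keeps stepping
// left while doc comments continue; at idx == 0 it breaks with 0, so the
// function returns the index of the FIRST doc comment token, not the last.
```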
@@ -63,6 +75,7 @@ pub fn collectDocComments(
 tree: ast.Tree,
 doc_comments: ast.TokenIndex,
 format: types.MarkupContent.Kind,
+container_doc: bool,
 ) ![]const u8 {
 var lines = std.ArrayList([]const u8).init(allocator);
 defer lines.deinit();
@@ -70,28 +83,27 @@ pub fn collectDocComments(

 var curr_line_tok = doc_comments;
 while (true) : (curr_line_tok += 1) {
-switch (tokens[curr_line_tok]) {
-.doc_comment, .container_doc_comment => {
+const comm = tokens[curr_line_tok];
+if ((container_doc and comm == .container_doc_comment) or (!container_doc and comm == .doc_comment)) {
 try lines.append(std.mem.trim(u8, tree.tokenSlice(curr_line_tok)[3..], &std.ascii.spaces));
-},
-else => break,
-}
+} else break;
 }

 return try std.mem.join(allocator, if (format == .Markdown) "  \n" else "\n", lines.items);
 }
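A sketch of the call-side contract after this change (hypothetical call site; `allocator`, `tree`, and `node` are assumed to be in scope): `getDocComments` returns an optional slice that the caller must free, matching the rewritten doc comment above.

```zig
if (try getDocComments(allocator, tree, node, .Markdown)) |docs| {
    // Caller owns the returned memory, per the new doc comment.
    defer allocator.free(docs);
    // ... use `docs`, e.g. as hover or completion documentation ...
}
```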

-/// Gets a function signature (keywords, name, return value)
+/// Gets a function's keyword, name, arguments and return value.
 pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 {
 const start = offsets.tokenLocation(tree, func.ast.fn_token);
-// return type can be 0 when user wrote incorrect fn signature
-// to ensure we don't break, just end the signature at end of fn token
-if (func.ast.return_type == 0) return tree.source[start.start..start.end];
-const end = offsets.tokenLocation(tree, lastToken(tree, func.ast.return_type)).end;
-return tree.source[start.start..end];

+const end = if (func.ast.return_type != 0)
+offsets.tokenLocation(tree, lastToken(tree, func.ast.return_type))
+else
+start;
+return tree.source[start.start..end.end];
 }

-/// Gets a function snippet insert text
+/// Creates snippet insert text for a function. Caller owns returned memory.
 pub fn getFunctionSnippet(
 allocator: *std.mem.Allocator,
 tree: ast.Tree,
@@ -197,7 +209,6 @@ pub fn hasSelfParam(
 return false;
 }

-/// Gets a function signature (keywords, name, return value)
 pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const u8 {
 const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
 const end = offsets.tokenLocation(tree, lastToken(tree, var_decl.ast.init_node)).end;
@@ -232,14 +243,19 @@ pub fn isGenericFunction(tree: ast.Tree, func: ast.full.FnProto) bool {
 }
 return false;
 }

 // STYLE

 pub fn isCamelCase(name: []const u8) bool {
-return !std.ascii.isUpper(name[0]) and std.mem.indexOf(u8, name[0..(name.len - 1)], "_") == null;
+return !std.ascii.isUpper(name[0]) and !isSnakeCase(name);
 }

 pub fn isPascalCase(name: []const u8) bool {
-return std.ascii.isUpper(name[0]) and std.mem.indexOf(u8, name[0..(name.len - 1)], "_") == null;
+return std.ascii.isUpper(name[0]) and !isSnakeCase(name);
 }

+pub fn isSnakeCase(name: []const u8) bool {
+return std.mem.indexOf(u8, name, "_") != null;
+}

 // ANALYSIS ENGINE
@@ -499,8 +515,7 @@ pub fn resolveReturnType(
 .type = .{ .data = .{ .error_union = child_type_node }, .is_type_val = false },
 .handle = child_type.handle,
 };
-} else
-return child_type.instanceTypeVal();
+} else return child_type.instanceTypeVal();
 }

 /// Resolves the child type of an optional type
@@ -698,8 +713,6 @@ pub fn resolveTypeOfNodeInternal(
 if (std.meta.eql(i, node_handle))
 return null;
 }
-// We use the backing allocator here because the ArrayList expects its
-// allocated memory to persist while it is empty.
 try resolve_trail.append(node_handle);
 defer _ = resolve_trail.pop();

@@ -2271,8 +2284,6 @@ fn resolveUse(
 // it is self-referential and we cannot resolve it.
 if (std.mem.indexOfScalar([*]const u8, using_trail.items, symbol.ptr) != null)
 return null;
-// We use the backing allocator here because the ArrayList expects its
-// allocated memory to persist while it is empty.
 try using_trail.append(symbol.ptr);
 defer _ = using_trail.pop();

@@ -2636,7 +2647,7 @@ fn makeInnerScope(
 .insertText = name,
 .insertTextFormat = .PlainText,
 .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs|
-.{ .kind = .Markdown, .value = docs }
+types.MarkupContent{ .kind = .Markdown, .value = docs }
 else
 null,
 }, {});
@@ -2927,15 +2938,12 @@ fn makeScopeInternal(
 std.debug.assert(token_tags[name_token] == .identifier);

 const name = tree.tokenSlice(name_token);
-try scope.decls.putNoClobber(name, if (is_for)
-.{
+try scope.decls.putNoClobber(name, if (is_for) .{
 .array_payload = .{
 .identifier = name_token,
 .array_expr = while_node.ast.cond_expr,
 },
-}
-else
-.{
+} else .{
 .pointer_payload = .{
 .name = name_token,
 .condition = while_node.ast.cond_expr,

@@ -230,6 +230,7 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han
 });
 }

+// TODO: style warnings for types, values and declarations below root scope
 if (tree.errors.len == 0) {
 for (tree.rootDecls()) |decl_idx| {
 const decl = tree.nodes.items(.tag)[decl_idx];
@@ -643,7 +644,7 @@ fn hoverSymbol(
 },
 .param_decl => |param| def: {
 if (param.first_doc_comment) |doc_comments| {
-doc_str = try analysis.collectDocComments(&arena.allocator, handle.tree, doc_comments, hover_kind);
+doc_str = try analysis.collectDocComments(&arena.allocator, handle.tree, doc_comments, hover_kind, false);
 }

 const first_token = param.first_doc_comment orelse
@@ -959,7 +960,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
 const doc = if (param.first_doc_comment) |doc_comments|
 types.MarkupContent{
 .kind = doc_kind,
-.value = try analysis.collectDocComments(&context.arena.allocator, tree, doc_comments, doc_kind),
+.value = try analysis.collectDocComments(&context.arena.allocator, tree, doc_comments, doc_kind, false),
 }
 else
 null;

@@ -321,89 +321,49 @@ fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHan
 }
 }

-fn writeContainerField(
-builder: *Builder,
-arena: *std.heap.ArenaAllocator,
-store: *DocumentStore,
-node: ast.Node.Index,
-field_token_type: ?TokenType,
-child_frame: anytype,
-) !void {
-const tree = builder.handle.tree;
-const container_field = containerField(tree, node).?;
-const base = tree.nodes.items(.main_token)[node];
-const tokens = tree.tokens.items(.tag);
-
-if (analysis.getDocCommentTokenIndex(tokens, base)) |docs|
-try writeDocComments(builder, tree, docs);
-
-try writeToken(builder, container_field.comptime_token, .keyword);
-if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.name_token, tok_type);
-
-if (container_field.ast.type_expr != 0) {
-if (container_field.ast.align_expr != 0) {
-try writeToken(builder, tree.firstToken(container_field.ast.align_expr) - 2, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.align_expr });
-}
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.type_expr });
-}
-
-if (container_field.ast.value_expr != 0) block: {
-const eq_tok: ast.TokenIndex = if (container_field.ast.type_expr != 0)
-lastToken(tree, container_field.ast.type_expr) + 1
-else if (container_field.ast.align_expr != 0)
-lastToken(tree, container_field.ast.align_expr) + 1
-else
-break :block; // Check this, I believe it is correct.
-
-try writeToken(builder, eq_tok, .operator);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.value_expr });
-}
-}
-
 // TODO This is very slow and does a lot of extra work, improve in the future.
 fn writeNodeTokens(
 builder: *Builder,
 arena: *std.heap.ArenaAllocator,
 store: *DocumentStore,
 maybe_node: ?ast.Node.Index,
 ) error{OutOfMemory}!void {
-if (maybe_node == null) return;
-const node = maybe_node.?;
-if (node == 0) return;
+const start_node = maybe_node orelse return;

 const handle = builder.handle;
 const tree = handle.tree;
 const node_tags = tree.nodes.items(.tag);
 const token_tags = tree.tokens.items(.tag);
-const datas = tree.nodes.items(.data);
+const node_data = tree.nodes.items(.data);
 const main_tokens = tree.nodes.items(.main_token);
-if (node > datas.len) return;
+if (start_node > node_data.len) return;

+var stack = std.ArrayList(ast.Node.Index).init(arena.child_allocator);
+defer stack.deinit();

+try stack.append(start_node);

+while (stack.popOrNull()) |node| {
+if (node == 0 or node > node_data.len) continue;

 const tag = node_tags[node];
 const main_token = main_tokens[node];

-const FrameSize = @sizeOf(@Frame(writeNodeTokens));
-var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize);
-defer arena.child_allocator.free(child_frame);

 switch (tag) {
 .root => unreachable,
 .container_field,
 .container_field_align,
 .container_field_init,
-=> try writeContainerField(builder, arena, store, node, .field, child_frame),
+=> try writeContainerField(builder, arena, store, node, .field, &stack),
 .@"errdefer" => {
 try writeToken(builder, main_token, .keyword);

-if (datas[node].lhs != 0) {
-const payload_tok = datas[node].lhs;
+if (node_data[node].lhs != 0) {
+const payload_tok = node_data[node].lhs;
 try writeToken(builder, payload_tok - 1, .operator);
 try writeToken(builder, payload_tok, .variable);
 try writeToken(builder, payload_tok + 1, .operator);
 }

-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+try stack.append(node_data[node].rhs);
 },
 .block,
 .block_semicolon,
@@ -417,12 +377,12 @@ fn writeNodeTokens(

 var gap_highlighter = GapHighlighter.init(builder, first_tok);
 const statements: []const ast.Node.Index = switch (tag) {
-.block, .block_semicolon => tree.extra_data[datas[node].lhs..datas[node].rhs],
+.block, .block_semicolon => tree.extra_data[node_data[node].lhs..node_data[node].rhs],
 .block_two, .block_two_semicolon => blk: {
-const statements = &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
-const len: usize = if (datas[node].lhs == 0)
+const statements = &[_]ast.Node.Index{ node_data[node].lhs, node_data[node].rhs };
+const len: usize = if (node_data[node].lhs == 0)
 @as(usize, 0)
-else if (datas[node].rhs == 0)
+else if (node_data[node].rhs == 0)
 @as(usize, 1)
 else
 @as(usize, 2);
@@ -434,9 +394,9 @@ fn writeNodeTokens(
 for (statements) |child| {
 try gap_highlighter.next(child);
 if (node_tags[child].isContainerField()) {
-try writeContainerField(builder, arena, store, child, .field, child_frame);
+try writeContainerField(builder, arena, store, child, .field, &stack);
 } else {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
+try stack.append(child);
 }
 }

@@ -449,7 +409,7 @@ fn writeNodeTokens(
 => {
 const var_decl = varDecl(tree, node).?;
 if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
-try writeDocComments(builder, handle.tree, comment_idx);
+try writeDocComments(builder, tree, comment_idx);

 try writeToken(builder, var_decl.visib_token, .keyword);
 try writeToken(builder, var_decl.extern_export_token, .keyword);
@@ -464,22 +424,22 @@ fn writeNodeTokens(
 }

 if (var_decl.ast.type_node != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.type_node });
+try stack.append(var_decl.ast.type_node);
 if (var_decl.ast.align_node != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.align_node });
+try stack.append(var_decl.ast.align_node);
 if (var_decl.ast.section_node != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.section_node });
+try stack.append(var_decl.ast.section_node);

 try writeToken(builder, var_decl.ast.mut_token + 2, .operator);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.init_node });
+try stack.append(var_decl.ast.init_node);
 },
 .@"usingnamespace" => {
 const first_tok = tree.firstToken(node);
 if (first_tok > 0 and token_tags[first_tok - 1] == .doc_comment)
-try writeDocComments(builder, builder.handle.tree, first_tok - 1);
+try writeDocComments(builder, tree, first_tok - 1);
 try writeToken(builder, if (token_tags[first_tok] == .keyword_pub) first_tok else null, .keyword);
 try writeToken(builder, main_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 },
 .container_decl,
 .container_decl_trailing,
@@ -509,31 +469,31 @@ fn writeNodeTokens(
 try writeToken(builder, decl.ast.main_token, .keyword);
 if (decl.ast.enum_token) |enum_token| {
 if (decl.ast.arg != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg })
+try stack.append(decl.ast.arg)
 else
 try writeToken(builder, enum_token, .keyword);
-} else if (decl.ast.arg != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg });
+} else if (decl.ast.arg != 0) try stack.append(decl.ast.arg);

 var gap_highlighter = GapHighlighter.init(builder, main_token + 1);
 const field_token_type = fieldTokenType(node, handle);
 for (decl.ast.members) |child| {
 try gap_highlighter.next(child);
 if (node_tags[child].isContainerField()) {
-try writeContainerField(builder, arena, store, child, field_token_type, child_frame);
+try writeContainerField(builder, arena, store, child, field_token_type, &stack);
 } else {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
+try stack.append(child);
 }
 }
 try gap_highlighter.end(lastToken(tree, node));
 },
 .error_value => {
-if (datas[node].lhs != 0) {
-try writeToken(builder, datas[node].lhs - 1, .keyword);
+if (node_data[node].lhs != 0) {
+try writeToken(builder, node_data[node].lhs - 1, .keyword);
 }
-try writeToken(builder, datas[node].rhs, .errorTag);
+try writeToken(builder, node_data[node].rhs, .errorTag);
 },
 .identifier => {
-if (analysis.isTypeIdent(handle.tree, main_token)) {
+if (analysis.isTypeIdent(tree, main_token)) {
 return try writeToken(builder, main_token, .type);
 }

@@ -541,8 +501,8 @@ fn writeNodeTokens(
 store,
 arena,
 handle,
-handle.tree.getNodeSource(node),
-handle.tree.tokens.items(.start)[main_token],
+tree.getNodeSource(node),
+tree.tokens.items(.start)[main_token],
 )) |child| {
 if (child.decl.* == .param_decl) {
 return try writeToken(builder, main_token, .parameter);
@@ -564,19 +524,19 @@ fn writeNodeTokens(
 var buf: [1]ast.Node.Index = undefined;
 const fn_proto: ast.full.FnProto = fnProto(tree, node, &buf).?;
 if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
-try writeDocComments(builder, handle.tree, docs);
+try writeDocComments(builder, tree, docs);

 try writeToken(builder, fn_proto.visib_token, .keyword);
 try writeToken(builder, fn_proto.extern_export_token, .keyword);
 try writeToken(builder, fn_proto.lib_name, .string);
 try writeToken(builder, fn_proto.ast.fn_token, .keyword);

-const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto))
+const func_name_tok_type: TokenType = if (analysis.isTypeFunction(tree, fn_proto))
 .type
 else
 .function;

-const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto))
+const tok_mod = if (analysis.isGenericFunction(tree, fn_proto))
 TokenModifiers{ .generic = true }
 else
 TokenModifiers{};
@@ -585,59 +545,59 @@ fn writeNodeTokens(

 var it = fn_proto.iterate(tree);
 while (it.next()) |param_decl| {
-if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, handle.tree, docs);
+if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, tree, docs);

 try writeToken(builder, param_decl.comptime_noalias, .keyword);
 try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true });
 if (param_decl.anytype_ellipsis3) |any_token| {
 try writeToken(builder, any_token, .type);
-} else if (param_decl.type_expr != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param_decl.type_expr });
+} else if (param_decl.type_expr != 0) try stack.append(param_decl.type_expr);
 }

 if (fn_proto.ast.align_expr != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.align_expr });
+try stack.append(fn_proto.ast.align_expr);
 if (fn_proto.ast.section_expr != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.section_expr });
+try stack.append(fn_proto.ast.section_expr);
 if (fn_proto.ast.callconv_expr != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.callconv_expr });
+try stack.append(fn_proto.ast.callconv_expr);

 if (fn_proto.ast.return_type != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.return_type });
+try stack.append(fn_proto.ast.return_type);

 if (tag == .fn_decl)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+try stack.append(node_data[node].rhs);
 },
 .anyframe_type => {
 try writeToken(builder, main_token, .type);
-if (datas[node].rhs != 0) {
-try writeToken(builder, datas[node].lhs, .type);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+if (node_data[node].rhs != 0) {
+try writeToken(builder, node_data[node].lhs, .type);
+try stack.append(node_data[node].rhs);
 }
 },
 .@"defer" => {
 try writeToken(builder, main_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+try stack.append(node_data[node].rhs);
 },
 .@"comptime",
 .@"nosuspend",
 => {
 if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
-try writeDocComments(builder, handle.tree, doc);
+try writeDocComments(builder, tree, doc);
 try writeToken(builder, main_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 },
 .@"switch",
 .switch_comma,
 => {
 try writeToken(builder, main_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
-const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange);
+try stack.append(node_data[node].lhs);
+const extra = tree.extraData(node_data[node].rhs, ast.Node.SubRange);
 const cases = tree.extra_data[extra.start..extra.end];

-var gap_highlighter = GapHighlighter.init(builder, lastToken(tree, datas[node].lhs) + 1);
+var gap_highlighter = GapHighlighter.init(builder, lastToken(tree, node_data[node].lhs) + 1);
 for (cases) |case_node| {
 try gap_highlighter.next(case_node);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node });
+try stack.append(case_node);
 }
 try gap_highlighter.end(lastToken(tree, node));
 },
@@ -645,7 +605,7 @@ fn writeNodeTokens(
 .switch_case,
 => {
 const switch_case = if (tag == .switch_case) tree.switchCase(node) else tree.switchCaseOne(node);
-for (switch_case.ast.values) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node });
+for (switch_case.ast.values) |item_node| try stack.append(item_node);
 // check if it's 'else'
 if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword);
 try writeToken(builder, switch_case.ast.arrow_token, .operator);
@@ -653,7 +613,7 @@ fn writeNodeTokens(
 const p_token = @boolToInt(token_tags[payload_token] == .asterisk);
 try writeToken(builder, p_token, .variable);
 }
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.ast.target_expr });
+try stack.append(switch_case.ast.target_expr);
 },
 .@"while",
 .while_simple,
@@ -665,7 +625,7 @@ fn writeNodeTokens(
 try writeToken(builder, while_node.label_token, .label);
 try writeToken(builder, while_node.inline_token, .keyword);
 try writeToken(builder, while_node.ast.while_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cond_expr });
+try stack.append(while_node.ast.cond_expr);
 if (while_node.payload_token) |payload| {
 try writeToken(builder, payload - 1, .operator);
 try writeToken(builder, payload, .variable);
@@ -678,9 +638,9 @@ fn writeNodeTokens(
 try writeToken(builder, r_pipe, .operator);
 }
 if (while_node.ast.cont_expr != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cont_expr });
+try stack.append(while_node.ast.cont_expr);

-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.then_expr });
+try stack.append(while_node.ast.then_expr);

 if (while_node.ast.else_expr != 0) {
 try writeToken(builder, while_node.else_token, .keyword);
@@ -690,7 +650,7 @@ fn writeNodeTokens(
 try writeToken(builder, err_token, .variable);
 try writeToken(builder, err_token + 1, .operator);
 }
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.else_expr });
+try stack.append(while_node.ast.else_expr);
 }
 },
 .@"if",
@@ -699,7 +659,7 @@ fn writeNodeTokens(
 const if_node = ifFull(tree, node);

 try writeToken(builder, if_node.ast.if_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr });
+try stack.append(if_node.ast.cond_expr);

 if (if_node.payload_token) |payload| {
 // if (?x) |x|
@@ -707,7 +667,7 @@ fn writeNodeTokens(
 try writeToken(builder, payload, .variable); // x
 try writeToken(builder, payload + 1, .operator); // |
 }
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.then_expr });
+try stack.append(if_node.ast.then_expr);

 if (if_node.ast.else_expr != 0) {
 try writeToken(builder, if_node.else_token, .keyword);
@@ -717,7 +677,7 @@ fn writeNodeTokens(
 try writeToken(builder, err_token, .variable); // err
 try writeToken(builder, err_token + 1, .operator); // |
 }
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.else_expr });
+try stack.append(if_node.ast.else_expr);
 }
 },
 .array_init,
@@ -739,8 +699,8 @@ fn writeNodeTokens(
 };

 if (array_init.ast.type_expr != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_init.ast.type_expr });
-for (array_init.ast.elements) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem });
+try stack.append(array_init.ast.type_expr);
+for (array_init.ast.elements) |elem| try stack.append(elem);
 },
 .struct_init,
 .struct_init_comma,
@@ -763,7 +723,7 @@ fn writeNodeTokens(
 var field_token_type: ?TokenType = null;

 if (struct_init.ast.type_expr != 0) {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_init.ast.type_expr });
+try stack.append(struct_init.ast.type_expr);

 field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{
 .node = struct_init.ast.type_expr,
@@ -785,7 +745,7 @@ fn writeNodeTokens(
 try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.'
 try writeToken(builder, init_token - 2, field_token_type orelse .field); // name
 try writeToken(builder, init_token - 1, .operator); // '='
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init });
+try stack.append(field_init);
 }
 try gap_highlighter.end(lastToken(tree, node));
 },
@@ -806,14 +766,14 @@ fn writeNodeTokens(
 };

 try writeToken(builder, call.async_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr });
+try stack.append(call.ast.fn_expr);

 if (builder.current_token) |curr_tok| {
 if (curr_tok != lastToken(tree, call.ast.fn_expr) and token_tags[lastToken(tree, call.ast.fn_expr)] == .identifier) {
 try writeToken(builder, lastToken(tree, call.ast.fn_expr), .function);
 }
 }
-for (call.ast.params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
+for (call.ast.params) |param| try stack.append(param);
 },
 .slice,
 .slice_open,
@@ -826,43 +786,43 @@ fn writeNodeTokens(
 else => unreachable,
 };

-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sliced });
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.start });
+try stack.append(slice.ast.sliced);
+try stack.append(slice.ast.start);
 try writeToken(builder, lastToken(tree, slice.ast.start) + 1, .operator);

 if (slice.ast.end != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.end });
+try stack.append(slice.ast.end);
 if (slice.ast.sentinel != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sentinel });
+try stack.append(slice.ast.sentinel);
 },
 .array_access => {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+try stack.append(node_data[node].lhs);
+try stack.append(node_data[node].rhs);
 },
 .deref => {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 try writeToken(builder, main_token, .operator);
 },
 .unwrap_optional => {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 try writeToken(builder, main_token + 1, .operator);
 },
 .grouped_expression => {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 },
 .@"break",
 .@"continue",
 => {
 try writeToken(builder, main_token, .keyword);
-if (datas[node].lhs != 0)
-try writeToken(builder, datas[node].lhs, .label);
-if (datas[node].rhs != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+if (node_data[node].lhs != 0)
+try writeToken(builder, node_data[node].lhs, .label);
+if (node_data[node].rhs != 0)
+try stack.append(node_data[node].rhs);
 },
 .@"suspend", .@"return" => {
 try writeToken(builder, main_token, .keyword);
-if (datas[node].lhs != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+if (node_data[node].lhs != 0)
+try stack.append(node_data[node].lhs);
 },
 .integer_literal,
 .float_literal,
@@ -878,7 +838,7 @@ fn writeNodeTokens(
 .builtin_call_two,
 .builtin_call_two_comma,
 => {
-const data = datas[node];
+const data = node_data[node];
 const params = switch (tag) {
 .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
 .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
@@ -892,7 +852,7 @@ fn writeNodeTokens(

 try writeToken(builder, main_token, .builtin);
 for (params) |param|
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
+try stack.append(param);
 },
 .string_literal,
 .char_literal,
@@ -901,7 +861,7 @@ fn writeNodeTokens(
 },
 .multiline_string_literal => {
 var cur_tok = main_token;
-const last_tok = datas[node].rhs;
+const last_tok = node_data[node].rhs;

 while (cur_tok <= last_tok) : (cur_tok += 1) try writeToken(builder, cur_tok, .string);
 },
@@ -929,7 +889,7 @@ fn writeNodeTokens(

 try writeToken(builder, main_token, .keyword);
 try writeToken(builder, asm_node.volatile_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_node.ast.template });
+try stack.append(asm_node.ast.template);
 // TODO Inputs, outputs.
 },
 .@"anytype" => {
@@ -937,20 +897,20 @@ fn writeNodeTokens(
 },
 .test_decl => {
 if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
-try writeDocComments(builder, handle.tree, doc);
+try writeDocComments(builder, tree, doc);

 try writeToken(builder, main_token, .keyword);
 if (token_tags[main_token + 1] == .string_literal)
 try writeToken(builder, main_token + 1, .string);

-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+try stack.append(node_data[node].rhs);
 },
 .@"catch" => {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 try writeToken(builder, main_token, .keyword);
 if (token_tags[main_token + 1] == .pipe)
 try writeToken(builder, main_token + 1, .variable);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+try stack.append(node_data[node].rhs);
 },
 .add,
 .add_wrap,
@@ -994,22 +954,22 @@ fn writeNodeTokens(
 .sub_wrap,
 .@"orelse",
 => {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 const token_type: TokenType = switch (tag) {
 .bool_and, .bool_or => .keyword,
 else => .operator,
 };

 try writeToken(builder, main_token, token_type);
-if (datas[node].rhs != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
+if (node_data[node].rhs != 0)
+try stack.append(node_data[node].rhs);
 },
 .field_access => {
-const data = datas[node];
+const data = node_data[node];
 if (data.rhs == 0) return;
 const rhs_str = tree.tokenSlice(data.rhs);

-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs });
+try stack.append(data.lhs);

 // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
 // writeToken code.
@@ -1066,22 +1026,12 @@ fn writeNodeTokens(
 if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and
 main_token == main_tokens[ptr_type.ast.child_type])
 {
-return try await @asyncCall(child_frame, {}, writeNodeTokens, .{
-builder,
-arena,
-store,
-ptr_type.ast.child_type,
-});
+return try stack.append(ptr_type.ast.child_type);
 }

 if (ptr_type.size == .One) try writeToken(builder, main_token, .operator);
 if (ptr_type.ast.sentinel != 0) {
-return try await @asyncCall(child_frame, {}, writeNodeTokens, .{
-builder,
-arena,
-store,
-ptr_type.ast.sentinel,
-});
+return try stack.append(ptr_type.ast.sentinel);
 }

 try writeToken(builder, ptr_type.allowzero_token, .keyword);
@@ -1089,19 +1039,19 @@ fn writeNodeTokens(
 if (ptr_type.ast.align_node != 0) {
 const first_tok = tree.firstToken(ptr_type.ast.align_node);
 try writeToken(builder, first_tok - 2, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.align_node });
+try stack.append(ptr_type.ast.align_node);

 if (ptr_type.ast.bit_range_start != 0) {
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_start });
+try stack.append(ptr_type.ast.bit_range_start);
 try writeToken(builder, tree.firstToken(ptr_type.ast.bit_range_end - 1), .operator);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_end });
+try stack.append(ptr_type.ast.bit_range_end);
 }
 }

 try writeToken(builder, ptr_type.const_token, .keyword);
 try writeToken(builder, ptr_type.volatile_token, .keyword);

-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.child_type });
+try stack.append(ptr_type.ast.child_type);
 },
 .array_type,
 .array_type_sentinel,
@@ -1111,11 +1061,11 @@ fn writeNodeTokens(
 else
 tree.arrayTypeSentinel(node);

-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_count });
+try stack.append(array_type.ast.elem_count);
 if (array_type.ast.sentinel != 0)
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.sentinel });
+try stack.append(array_type.ast.sentinel);

-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_type });
+try stack.append(array_type.ast.elem_type);
 },
 .address_of,
 .bit_not,
@@ -1125,18 +1075,59 @@ fn writeNodeTokens(
 .negation_wrap,
 => {
 try writeToken(builder, main_token, .operator);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 },
 .@"try",
 .@"resume",
 .@"await",
 => {
 try writeToken(builder, main_token, .keyword);
-try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
+try stack.append(node_data[node].lhs);
 },
 .anyframe_literal => try writeToken(builder, main_token, .keyword),
 }
+}
 }
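The hunks in this file are one mechanical transformation: every recursive `try await @asyncCall(child_frame, {}, writeNodeTokens, ...)` becomes a `try stack.append(...)`, and the single `while (stack.popOrNull())` loop introduced at the top of the function drains the work list. A minimal sketch of the pattern in isolation (illustrative only; `visit` is a placeholder, and the `*std.mem.Allocator` style matches the Zig version this diff targets):

```zig
const std = @import("std");

fn walkIteratively(allocator: *std.mem.Allocator, root: u32) !void {
    var stack = std.ArrayList(u32).init(allocator);
    defer stack.deinit();
    try stack.append(root);
    while (stack.popOrNull()) |node| {
        // visit(node) goes here; child nodes are pushed onto `stack`
        // instead of being recursed into, so no async frame is needed.
        _ = node;
    }
}
```

Note the trade-off: token writes still happen inline at each node, but children are now visited later in LIFO order rather than immediately. That reordering is what eliminates the per-visit frame allocation (`@sizeOf(@Frame(writeNodeTokens))`) the old code made on every call.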

+fn writeContainerField(
+builder: *Builder,
+arena: *std.heap.ArenaAllocator,
+store: *DocumentStore,
+node: ast.Node.Index,
+field_token_type: ?TokenType,
+stack: *std.ArrayList(ast.Node.Index),
+) !void {
+const tree = builder.handle.tree;
+const container_field = containerField(tree, node).?;
+const base = tree.nodes.items(.main_token)[node];
+const tokens = tree.tokens.items(.tag);
+
+if (analysis.getDocCommentTokenIndex(tokens, base)) |docs|
+try writeDocComments(builder, tree, docs);
+
+try writeToken(builder, container_field.comptime_token, .keyword);
+if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.name_token, tok_type);
+
+if (container_field.ast.type_expr != 0) {
+if (container_field.ast.align_expr != 0) {
+try writeToken(builder, tree.firstToken(container_field.ast.align_expr) - 2, .keyword);
+try stack.append(container_field.ast.align_expr);
+}
+try stack.append(container_field.ast.type_expr);
+}
+
+if (container_field.ast.value_expr != 0) block: {
+const eq_tok: ast.TokenIndex = if (container_field.ast.type_expr != 0)
+lastToken(tree, container_field.ast.type_expr) + 1
+else if (container_field.ast.align_expr != 0)
+lastToken(tree, container_field.ast.align_expr) + 1
+else
+break :block; // Check this, I believe it is correct.
+
+try writeToken(builder, eq_tok, .operator);
+try stack.append(container_field.ast.value_expr);
+}
+}

 // TODO Range version, edit version.
 pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {

@@ -23,15 +23,7 @@ fn fnProtoToSignatureInfo(
 const token_starts = tree.tokens.items(.start);
 const alloc = &arena.allocator;
 const label = analysis.getFunctionSignature(tree, proto);
-const proto_comments = types.MarkupContent{ .value = if (try analysis.getDocComments(
-alloc,
-tree,
-fn_node,
-.Markdown,
-)) |dc|
-dc
-else
-"" };
+const proto_comments = (try analysis.getDocComments(alloc, tree, fn_node, .Markdown)) orelse "";

 const arg_idx = if (skip_self_param) blk: {
 const has_self_param = try analysis.hasSelfParam(arena, document_store, handle, proto);
@@ -42,14 +34,9 @@ fn fnProtoToSignatureInfo(
 var param_it = proto.iterate(tree);
 while (param_it.next()) |param| {
 const param_comments = if (param.first_doc_comment) |dc|
-types.MarkupContent{ .value = try analysis.collectDocComments(
-alloc,
-tree,
-dc,
-.Markdown,
-) }
+try analysis.collectDocComments(alloc, tree, dc, .Markdown, false)
 else
-null;
+"";

 var param_label_start: usize = 0;
 var param_label_end: usize = 0;
@@ -77,12 +64,12 @@ fn fnProtoToSignatureInfo(
 const param_label = tree.source[param_label_start..param_label_end];
 try params.append(alloc, .{
 .label = param_label,
-.documentation = param_comments,
+.documentation = types.MarkupContent{ .value = param_comments },
 });
 }
 return types.SignatureInformation{
 .label = label,
-.documentation = proto_comments,
+.documentation = types.MarkupContent{ .value = proto_comments },
 .parameters = params.items,
 .activeParameter = arg_idx,
 };
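Two simplifications happen in this file: `getDocComments` returns `!?[]const u8`, so a parenthesized `try` unwraps the error union and `orelse ""` then defaults the optional, collapsing the nine-line if/else; and the `types.MarkupContent` wrapper is now applied once at the use sites. A miniature before/after (hypothetical `getDocs` stands in for the real call):

```zig
// Before: wrap first, branch on the optional inside the initializer.
const docs_before = types.MarkupContent{ .value = if (try getDocs()) |dc| dc else "" };

// After: unwrap the error union, default the optional, wrap at the use site.
const comments = (try getDocs()) orelse "";
const docs_after = types.MarkupContent{ .value = comments };
```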

@@ -189,9 +189,76 @@ test "Request completion with no trailing whitespace" {
 );
 }

+test "Encoded space in file name and usingnamespace on non-existing symbol" {
+var server = try Server.start(initialize_msg, null);
+defer server.shutdown();
+
+try server.request("textDocument/didOpen",
+\\{"textDocument":{"uri":"file:///%20test.zig","languageId":"zig","version":420,"text":"usingnamespace a.b;\nb."}}
+, null);
+try server.request("textDocument/completion",
+\\{"textDocument":{"uri":"file:///%20test.zig"}, "position":{"line":1,"character":2}}
+,
+\\{"isIncomplete":false,"items":[]}
+);
+}

+test "Self-referential definition" {
+var server = try Server.start(initialize_msg, null);
+defer server.shutdown();
+try server.request("textDocument/didOpen",
+\\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const h = h(0);\nc"}}
+, null);
+try server.request("textDocument/completion",
+\\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+,
+\\{"isIncomplete":false,"items":[{"label":"h","kind":21,"textEdit":null,"filterText":null,"insertText":"h","insertTextFormat":1,"detail":"const h = h(0)","documentation":null}]}
+);
+}
+test "Missing return type" {
+var server = try Server.start(initialize_msg, null);
+defer server.shutdown();
+try server.request("textDocument/didOpen",
+\\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"fn w() {}\nc"}}
+, null);
+try server.request("textDocument/completion",
+\\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+,
+\\{"isIncomplete":false,"items":[{"label":"w","kind":3,"textEdit":null,"filterText":null,"insertText":"w","insertTextFormat":1,"detail":"fn","documentation":null}]}
+);
+}

+test "Pointer and optional deref" {
+var server = try Server.start(initialize_msg, null);
+defer server.shutdown();
+try server.request("textDocument/didOpen",
+\\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"var value: ?struct { data: i32 = 5 } = null;const ptr = &value;\nconst a = ptr.*.?."}}
+, null);
+try server.request("textDocument/completion",
+\\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":18}}
+,
+\\{"isIncomplete":false,"items":[{"label":"data","kind":5,"textEdit":null,"filterText":null,"insertText":"data","insertTextFormat":1,"detail":"data: i32 = 5","documentation":null}]}
+);
+}

 test "Request utf-8 offset encoding" {
 var server = try Server.start(initialize_msg_offs,
 \\{"offsetEncoding":"utf-8","capabilities":{"signatureHelpProvider":{"triggerCharacters":["("],"retriggerCharacters":[","]},"textDocumentSync":1,"renameProvider":true,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"hoverProvider":true,"codeActionProvider":false,"declarationProvider":true,"definitionProvider":true,"typeDefinitionProvider":true,"implementationProvider":false,"referencesProvider":true,"documentSymbolProvider":true,"colorProvider":false,"documentFormattingProvider":true,"documentRangeFormattingProvider":false,"foldingRangeProvider":false,"selectionRangeProvider":false,"workspaceSymbolProvider":false,"rangeProvider":false,"documentProvider":true,"workspace":{"workspaceFolders":{"supported":false,"changeNotifications":false}},"semanticTokensProvider":{"full":true,"range":false,"legend":{"tokenTypes":["type","parameter","variable","enumMember","field","errorTag","function","keyword","comment","string","number","operator","builtin","label","keywordLiteral"],"tokenModifiers":["namespace","struct","enum","union","opaque","declaration","async","documentation","generic"]}}},"serverInfo":{"name":"zls","version":"0.1.0"}}
 );
 server.shutdown();
 }

+// not fixed yet!
+// test "Self-referential import" {
+// var server = try Server.start(initialize_msg, null);
+// defer server.shutdown();
+// try server.request("textDocument/didOpen",
+// \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const a = @import(\"test.zig\").a;\nc"}}
+// , null);
+// try server.request("textDocument/completion",
+// \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+// ,
+// \\{"isIncomplete":false,"items":[]}
+// );
+// }
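All of the regression tests above reuse the same harness pattern from this test file: start a server with the standard initialize message, send a `textDocument/didOpen` notification carrying the troublesome source, then issue a request and compare against the exact expected JSON (passing `null` as the third argument appears to skip the response check, as in the `didOpen` calls). A sketch of adding another case under those assumptions (the source text and expected response here are illustrative, not real fixtures):

```zig
test "Another regression case (sketch)" {
    var server = try Server.start(initialize_msg, null);
    defer server.shutdown();
    try server.request("textDocument/didOpen",
        \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"usingnamespace x.y;\nz."}}
    , null);
    try server.request("textDocument/completion",
        \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":2}}
    ,
        \\{"isIncomplete":false,"items":[]}
    );
}
```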