Calculate correct token locations and ensure all semantic highlighting matches
parent acc45b4efe
commit 3d8a9732fc
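The unifying change in this diff: every line-based tree.tokenLocation(0, tok).line_start / .line_end call is replaced by a byte-offset helper, offsets.tokenLocation, added to src/offsets.zig below. A minimal sketch of the call-shape change — only names that appear in the hunks, not code from the commit itself:

    // before: line boundaries, so slices needed fix-ups like `end - 1`
    const start = tree.tokenLocation(0, func.ast.fn_token).line_start;

    // after: the exact byte range of the token itself
    const loc = offsets.tokenLocation(tree, func.ast.fn_token); // offsets.Loc{ .start, .end }
    const signature = tree.source[loc.start..loc.end];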
src/analysis.zig (136 changes)
@@ -14,42 +14,48 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex {
     var idx = current;
     if (idx == 0) return null;
     switch (tags[node]) {
-        .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => {
+        .fn_proto,
+        .fn_proto_one,
+        .fn_proto_simple,
+        .fn_proto_multi,
+        .fn_decl,
+        => {
             idx -= 1;
             if (tokens[idx] == .keyword_extern and idx > 0)
                 idx -= 1;
             if (tokens[idx] == .keyword_pub and idx > 0)
                 idx -= 1;
         },
-        .local_var_decl, .global_var_decl, .aligned_var_decl, .simple_var_decl => {
+        .local_var_decl,
+        .global_var_decl,
+        .aligned_var_decl,
+        .simple_var_decl,
+        => {
             idx -= 1;
             if (tokens[idx] == .keyword_pub and idx > 0)
                 idx -= 1;
         },
-        .container_field, .container_field_init, .container_field_align => {
-            idx -= 2; // skip '.' token
-        },
+        .error_value => idx -= 1,
+        .container_field,
+        .container_field_init,
+        .container_field_align,
+        => idx -= 1,
+        .test_decl => idx -= 1,
         else => {
-            if (isContainer(tags[node])) {
-                idx -= 1; // go to '='
-                idx -= 1; // mutability
-                idx -= 1; // possible 'pub'
-                if (tokens[idx] == .keyword_pub and idx > 0)
-                    idx -= 1;
-            } else log.debug("Doc comment check for tag: {s}", .{tags[node]});
+            log.debug("Doc comment check for tag: {s}", .{tags[node]});
         },
     }
 
     // Find first doc comment token
     if (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) {
-        while ((tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) and idx > 0) : (idx -= 1) {}
-        return idx + 1;
+        while (idx > 0 and
+            (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment))
+        {
+            idx -= 1;
+        }
+        return idx + @boolToInt(tokens[idx] != .doc_comment and tokens[idx] != .container_doc_comment);
     }
 
-    // @TODO: Implement doc comments for tags
-    // } else if (node.castTag(.ErrorTag)) |tag| {
-    //     return tag.doc_comments;
-    // }
     return null;
 }
 
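The rewritten scan stops either on the first doc comment (when idx reaches 0) or one token before it; the @boolToInt term covers both cases without a branch. A self-contained sketch of the same pattern with a hypothetical toy token enum (Zig of the era this commit targets, where the builtin is still named @boolToInt):

    const std = @import("std");

    const Tag = enum { keyword_pub, doc_comment, identifier };

    fn firstDocComment(tokens: []const Tag, current: usize) ?usize {
        var idx = current;
        if (tokens[idx] != .doc_comment) return null;
        while (idx > 0 and tokens[idx] == .doc_comment) {
            idx -= 1;
        }
        // Stopped on the first doc comment (idx == 0) or one token before it.
        return idx + @boolToInt(tokens[idx] != .doc_comment);
    }

    test "backward doc comment scan" {
        const toks = [_]Tag{ .doc_comment, .doc_comment, .identifier };
        std.debug.assert(firstDocComment(&toks, 1).? == 0);
        const toks2 = [_]Tag{ .keyword_pub, .doc_comment, .identifier };
        std.debug.assert(firstDocComment(&toks2, 1).? == 1);
    }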
@@ -97,9 +103,9 @@ pub fn collectDocComments(
 
 /// Gets a function signature (keywords, name, return value)
 pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 {
-    const start = tree.tokenLocation(0, func.ast.fn_token).line_start;
-    const end = tree.tokenLocation(0, tree.nodes.items(.main_token)[func.ast.return_type]).line_end;
-    return tree.source[start .. end - 1];
+    const start = offsets.tokenLocation(tree, func.ast.fn_token).start;
+    const end = offsets.tokenLocation(tree, tree.nodes.items(.main_token)[func.ast.return_type]).end;
+    return tree.source[start..end];
 }
 
 /// Gets a function snippet insert text
@@ -161,17 +167,17 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: ast.full.FnProto) ![]const u8 {
 
 /// Gets a function signature (keywords, name, return value)
 pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const u8 {
-    const start = tree.tokenLocation(0, var_decl.ast.mut_token).line_start;
-    const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(var_decl.ast.init_node)).line_end;
+    const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
+    const end = offsets.tokenLocation(tree, tree.lastToken(var_decl.ast.init_node)).end;
     return tree.source[start..end];
 }
 
 // analysis.getContainerFieldSignature(handle.tree, field)
 pub fn getContainerFieldSignature(tree: ast.Tree, field: ast.full.ContainerField) []const u8 {
-    const start = tree.tokenLocation(0, field.ast.name_token).line_start;
+    const start = offsets.tokenLocation(tree, field.ast.name_token).start;
     const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
-    const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(end_node)).line_end;
-    return tree.source[start .. end - 1];
+    const end = offsets.tokenLocation(tree, tree.lastToken(end_node)).end;
+    return tree.source[start..end];
 }
 
 /// The type node is "type"
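The dropped `end - 1` follows from the new semantics: .line_end pointed past the token's whole line (at the newline), while Loc.end is an exclusive offset just past the token's last byte, so a plain half-open slice is exact. A toy check with a hypothetical source string:

    const std = @import("std");

    test "half-open token slices need no end adjustment" {
        const source = "fn foo() void";
        // A Loc for the `void` token would be .{ .start = 9, .end = 13 }.
        std.debug.assert(std.mem.eql(u8, source[9..13], "void"));
    }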
@@ -610,7 +616,11 @@ pub fn resolveTypeOfNodeInternal(
     const starts = tree.tokens.items(.start);
 
     switch (node_tags[node]) {
-        .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
+        .global_var_decl,
+        .local_var_decl,
+        .simple_var_decl,
+        .aligned_var_decl,
+        => {
             const var_decl = varDecl(tree, node).?;
             if (var_decl.ast.type_node != 0) block: {
                 return ((try resolveTypeOfNodeInternal(
@@ -647,7 +657,10 @@
             }
             return null;
         },
-        .container_field, .container_field_init, .container_field_align => |c| {
+        .container_field,
+        .container_field_init,
+        .container_field_align,
+        => |c| {
             const field: ast.full.ContainerField = switch (c) {
                 .container_field => tree.containerField(node),
                 .container_field_align => tree.containerFieldAlign(node),
@@ -721,10 +734,17 @@
             }
             return null;
         },
-        .@"comptime", .@"nosuspend", .grouped_expression => {
+        .@"comptime",
+        .@"nosuspend",
+        .grouped_expression,
+        => {
             return try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle }, bound_type_params);
         },
-        .struct_init, .struct_init_comma, .struct_init_one, .struct_init_one_comma => {
+        .struct_init,
+        .struct_init_comma,
+        .struct_init_one,
+        .struct_init_one_comma,
+        => {
             return ((try resolveTypeOfNodeInternal(
                 store,
                 arena,
@@ -735,14 +755,19 @@
         .error_set_decl => {
             return TypeWithHandle.typeVal(node_handle);
         },
-        .slice, .slice_sentinel, .slice_open => {
+        .slice,
+        .slice_sentinel,
+        .slice_open,
+        => {
             const left_type = (try resolveTypeOfNodeInternal(store, arena, .{
                 .node = datas[node].lhs,
                 .handle = handle,
             }, bound_type_params)) orelse return null;
             return try resolveBracketAccessType(store, arena, left_type, .Range, bound_type_params);
         },
-        .deref, .unwrap_optional => {
+        .deref,
+        .unwrap_optional,
+        => {
             const left_type = (try resolveTypeOfNodeInternal(store, arena, .{
                 .node = datas[node].lhs,
                 .handle = handle,
@@ -931,10 +956,7 @@
             .type = .{ .data = .{ .other = node }, .is_type_val = false },
             .handle = handle,
         },
-        .root => return TypeWithHandle.typeVal(node_handle),
-        else => {
-            // log.debug("TODO: implement type resolving for ast tag: {s}", .{node_tags[node]});
-        },
+        else => {},
     }
     return null;
 }
@@ -1319,6 +1341,8 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 {
         .async_call_one,
         .async_call_one_comma,
         => return tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1),
+        .test_decl => if (data[node].lhs != 0)
+            return tree.tokenSlice(data[node].lhs),
         else => |tag| log.debug("INVALID: {}", .{tag}),
     }
 
@@ -1326,8 +1350,8 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 {
 }
 
 fn nodeContainsSourceIndex(tree: ast.Tree, node: ast.Node.Index, source_index: usize) bool {
-    const first_token = tree.tokenLocation(0, tree.firstToken(node)).line_start;
-    const last_token = tree.tokenLocation(@truncate(u32, first_token), tree.lastToken(node)).line_end;
+    const first_token = offsets.tokenLocation(tree, tree.firstToken(node)).start;
+    const last_token = offsets.tokenLocation(tree, tree.lastToken(node)).end;
     return source_index >= first_token and source_index <= last_token;
 }
 
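With both endpoints now exact byte offsets, nodeContainsSourceIndex reduces to a plain interval check, inclusive of the node's last byte. The same logic in isolation (hypothetical offsets; Loc as the commit defines it in src/offsets.zig below):

    const std = @import("std");

    const Loc = struct { start: usize, end: usize };

    fn containsIndex(first: Loc, last: Loc, source_index: usize) bool {
        return source_index >= first.start and source_index <= last.end;
    }

    test "byte-offset containment" {
        const first = Loc{ .start = 4, .end = 6 };
        const last = Loc{ .start = 10, .end = 12 };
        std.debug.assert(containsIndex(first, last, 11));
        std.debug.assert(!containsIndex(first, last, 3));
    }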
@@ -2332,11 +2356,11 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !DocumentScope {
 }
 
 fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange {
-    const loc_start = tree.tokenLocation(0, tree.firstToken(node));
-    const loc_end = tree.tokenLocation(@truncate(u32, loc_start.line_start), tree.lastToken(node));
+    const loc_start = offsets.tokenLocation(tree, tree.firstToken(node));
+    const loc_end = offsets.tokenLocation(tree, tree.lastToken(node));
     return SourceRange{
-        .start = loc_start.line_start,
-        .end = loc_end.line_end,
+        .start = loc_start.start,
+        .end = loc_end.end,
     };
 }
 
@@ -2554,8 +2578,8 @@ fn makeScopeInternal(
     const scope = try scopes.addOne(allocator);
     scope.* = .{
         .range = .{
-            .start = tree.tokenLocation(0, main_tokens[node_idx]).line_start,
-            .end = tree.tokenLocation(0, last_token).line_start,
+            .start = offsets.tokenLocation(tree, main_tokens[node_idx]).start,
+            .end = offsets.tokenLocation(tree, last_token).start,
         },
         .decls = std.StringHashMap(Declaration).init(allocator),
         .uses = &.{},
@@ -2627,8 +2651,8 @@
     var scope = try scopes.addOne(allocator);
     scope.* = .{
         .range = .{
-            .start = tree.tokenLocation(0, payload).line_start,
-            .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.then_expr)).line_end,
+            .start = offsets.tokenLocation(tree, payload).start,
+            .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.then_expr)).end,
         },
         .decls = std.StringHashMap(Declaration).init(allocator),
         .uses = &.{},
@@ -2657,8 +2681,8 @@
     var scope = try scopes.addOne(allocator);
     scope.* = .{
         .range = .{
-            .start = tree.tokenLocation(0, err_token).line_start,
-            .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.else_expr)).line_end,
+            .start = offsets.tokenLocation(tree, err_token).start,
+            .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.else_expr)).end,
         },
         .decls = std.StringHashMap(Declaration).init(allocator),
         .uses = &.{},
@@ -2687,8 +2711,8 @@
     var scope = try scopes.addOne(allocator);
     scope.* = .{
         .range = .{
-            .start = tree.tokenLocation(0, while_node.ast.while_token).line_start,
-            .end = tree.tokenLocation(0, tree.lastToken(node_idx)).line_end,
+            .start = offsets.tokenLocation(tree, while_node.ast.while_token).start,
+            .end = offsets.tokenLocation(tree, tree.lastToken(node_idx)).end,
         },
         .decls = std.StringHashMap(Declaration).init(allocator),
         .uses = &.{},
@@ -2704,8 +2728,8 @@
     var scope = try scopes.addOne(allocator);
     scope.* = .{
         .range = .{
-            .start = tree.tokenLocation(0, payload).line_start,
-            .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.then_expr)).line_end,
+            .start = offsets.tokenLocation(tree, payload).start,
+            .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.then_expr)).end,
         },
         .decls = std.StringHashMap(Declaration).init(allocator),
         .uses = &.{},
@@ -2733,8 +2757,8 @@
     var scope = try scopes.addOne(allocator);
     scope.* = .{
         .range = .{
-            .start = tree.tokenLocation(0, err_token).line_start,
-            .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.else_expr)).line_end,
+            .start = offsets.tokenLocation(tree, err_token).start,
+            .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.else_expr)).end,
         },
         .decls = std.StringHashMap(Declaration).init(allocator),
         .uses = &.{},
@@ -2760,8 +2784,8 @@
     var scope = try scopes.addOne(allocator);
     scope.* = .{
         .range = .{
-            .start = tree.tokenLocation(0, payload).line_start,
-            .end = tree.tokenLocation(0, tree.lastToken(switch_case.ast.target_expr)).line_end,
+            .start = offsets.tokenLocation(tree, payload).start,
+            .end = offsets.tokenLocation(tree, tree.lastToken(switch_case.ast.target_expr)).end,
         },
         .decls = std.StringHashMap(Declaration).init(allocator),
         .uses = &.{},
src/main.zig (22 changes)
@@ -220,7 +220,12 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Handle) !void {
     for (tree.rootDecls()) |decl_idx| {
         const decl = tree.nodes.items(.tag)[decl_idx];
         switch (decl) {
-            .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl => blk: {
+            .fn_proto,
+            .fn_proto_multi,
+            .fn_proto_one,
+            .fn_proto_simple,
+            .fn_decl,
+            => blk: {
                 var buf: [1]std.zig.ast.Node.Index = undefined;
                 const func = analysis.fnProto(tree, decl_idx, &buf).?;
                 if (func.extern_export_token != null) break :blk;
@@ -367,7 +372,12 @@ fn nodeToCompletion(
     if (is_type_val) return;
 
     switch (node_tags[node]) {
-        .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_decl => {
+        .fn_proto,
+        .fn_proto_multi,
+        .fn_proto_one,
+        .fn_proto_simple,
+        .fn_decl,
+        => {
             var buf: [1]std.zig.ast.Node.Index = undefined;
             const func = analysis.fnProto(tree, node, &buf).?;
             if (func.name_token) |name_token| {
@@ -376,7 +386,7 @@ fn nodeToCompletion(
             const insert_text = if (use_snippets) blk: {
                 // TODO Also check if we are dot accessing from a type val and dont skip in that case.
                 const skip_self_param = if (func.ast.params.len > 0) param_check: {
-                    const in_container = analysis.innermostContainer(handle, tree.tokenLocation(0, func.ast.fn_token).line_start);
+                    const in_container = analysis.innermostContainer(handle, tree.tokens.items(.start)[func.ast.fn_token]);
 
                     var it = func.iterate(tree);
                     const param = it.next().?;
@@ -603,7 +613,9 @@ fn hoverSymbol(
                 tree.firstToken(param.type_expr);
             const last_token = tree.lastToken(param.type_expr);
 
-            const signature_str = tree.source[tree.tokenLocation(0, first_token).line_start..tree.tokenLocation(0, last_token).line_end];
+            const start = offsets.tokenLocation(tree, first_token).start;
+            const end = offsets.tokenLocation(tree, last_token).end;
+            const signature_str = tree.source[start..end];
             break :param_decl if (hover_kind == .Markdown)
                 try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```\n{s}", .{ signature_str, doc_str })
             else
@@ -895,7 +907,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
                 .label = tree.tokenSlice(param.name_token.?),
                 .kind = .Constant,
                 .documentation = doc,
-                .detail = tree.source[tree.tokenLocation(0, first_token).line_start..tree.tokenLocation(0, last_token).line_end],
+                .detail = tree.source[offsets.tokenLocation(tree, first_token).start..offsets.tokenLocation(tree, last_token).end],
             });
         },
         .pointer_payload => |payload| {

src/offsets.zig
@@ -1,5 +1,6 @@
 const std = @import("std");
 const types = @import("types.zig");
+const ast = std.zig.ast;
 
 pub const Encoding = enum {
     utf8,
@@ -70,7 +71,7 @@ pub const TokenLocation = struct {
     }
 };
 
-pub fn tokenRelativeLocation(tree: std.zig.ast.Tree, start_index: usize, token: std.zig.ast.TokenIndex, encoding: Encoding) !TokenLocation {
+pub fn tokenRelativeLocation(tree: ast.Tree, start_index: usize, token: ast.TokenIndex, encoding: Encoding) !TokenLocation {
     const start = tree.tokens.items(.start)[token];
 
     var loc = TokenLocation{
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Asserts the token is comprised of valid utf8
|
/// Asserts the token is comprised of valid utf8
|
||||||
pub fn tokenLength(tree: std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encoding: Encoding) usize {
|
pub fn tokenLength(tree: ast.Tree, token: ast.TokenIndex, encoding: Encoding) usize {
|
||||||
const token_loc = tree.tokenLocation(0, token);
|
const token_loc = tokenLocation(tree, token);
|
||||||
if (encoding == .utf8)
|
if (encoding == .utf8)
|
||||||
return token_loc.line_end - token_loc.line_start;
|
return token_loc.end - token_loc.start;
|
||||||
|
|
||||||
var i: usize = token_loc.line_start;
|
var i: usize = token_loc.start;
|
||||||
var utf16_len: usize = 0;
|
var utf16_len: usize = 0;
|
||||||
while (i < token_loc.line_end) {
|
while (i < token_loc.end) {
|
||||||
const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable;
|
const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable;
|
||||||
const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable;
|
const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable;
|
||||||
if (codepoint < 0x10000) {
|
if (codepoint < 0x10000) {
|
||||||
@@ -128,6 +129,28 @@ pub fn tokenLength(tree: std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encoding: Encoding) usize {
     return utf16_len;
 }
 
+/// Token location inside source
+pub const Loc = struct {
+    start: usize,
+    end: usize,
+};
+
+pub fn tokenLocation(tree: ast.Tree, token_index: ast.TokenIndex) Loc {
+    const start = tree.tokens.items(.start)[token_index];
+    const tag = tree.tokens.items(.tag)[token_index];
+
+    // For some tokens, re-tokenization is needed to find the end.
+    var tokenizer: std.zig.Tokenizer = .{
+        .buffer = tree.source,
+        .index = start,
+        .pending_invalid_token = null,
+    };
+
+    const token = tokenizer.next();
+    std.debug.assert(token.tag == tag);
+    return .{ .start = token.loc.start, .end = token.loc.end };
+}
+
 pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range {
     var line_idx: i64 = 0;
     var curr_line: []const u8 = doc.text;
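The new tokenLocation recovers a token's end by re-running the tokenizer from its stored start offset, since the parsed tree keeps only start offsets; the assert checks that re-tokenization reproduced the stored tag. A self-contained demonstration of the same std.zig.Tokenizer round trip (API names as of the Zig version this commit targets):

    const std = @import("std");

    test "re-tokenize from a known start offset" {
        const source = "const x = 42;";
        var tokenizer = std.zig.Tokenizer.init(source);
        const tok = tokenizer.next(); // `const`
        std.debug.assert(tok.tag == .keyword_const);
        std.debug.assert(std.mem.eql(u8, source[tok.loc.start..tok.loc.end], "const"));
    }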
src/semantic_tokens.zig
@@ -64,12 +64,13 @@ const Builder = struct {
     }
 
     fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
+        const starts = self.handle.tree.tokens.items(.start);
         const start_idx = if (self.current_token) |current_token|
-            self.handle.tree.tokenLocation(0, current_token).line_start
+            starts[current_token]
         else
             0;
 
-        if (start_idx > self.handle.tree.tokenLocation(0, token).line_start)
+        if (start_idx > starts[token])
             return;
 
         const delta_loc = offsets.tokenRelativeLocation(self.handle.tree, start_idx, token, self.encoding) catch return;
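Builder.add only needs the previous token's start byte to order tokens and compute the relative location, so indexing the cached starts slice replaces two full tokenLocation calls. The relative location matters because LSP semantic tokens are delta-encoded on the wire; a sketch of that encoding with toy positions (not zls code):

    const std = @import("std");

    test "LSP semantic token delta encoding" {
        // Each token: { line, start_char }. The wire format stores
        // { delta_line, delta_start, length, type, modifiers } per token.
        const prev = [2]u32{ 0, 4 };
        const curr = [2]u32{ 2, 1 };
        const delta_line = curr[0] - prev[0];
        // delta_start is relative to prev only when on the same line.
        const delta_start = if (delta_line == 0) curr[1] - prev[1] else curr[1];
        std.debug.assert(delta_line == 2 and delta_start == 1);
    }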
@@ -268,6 +269,8 @@ fn writeNodeTokens(
     maybe_node: ?ast.Node.Index,
 ) error{OutOfMemory}!void {
     if (maybe_node == null) return;
+    const node = maybe_node.?;
+    if (node == 0) return;
 
     const handle = builder.handle;
     const tree = handle.tree;
@@ -275,9 +278,8 @@
     const token_tags = tree.tokens.items(.tag);
     const datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
+    if (node > datas.len) return;
 
-    const node = maybe_node.?;
-    if (node > node_tags.len) return;
     const tag = node_tags[node];
     const main_token = main_tokens[node];
 
@@ -286,18 +288,16 @@
     defer arena.child_allocator.free(child_frame);
 
     switch (tag) {
-        .root => {
-            var gap_highlighter = GapHighlighter.init(builder, 0);
-            var buf: [2]ast.Node.Index = undefined;
-            for (analysis.declMembers(tree, .root, 0, &buf)) |child| {
-                try gap_highlighter.next(child);
-                if (node_tags[child].isContainerField()) {
-                    try writeContainerField(builder, arena, store, child, .field, child_frame);
-                } else {
-                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
-                }
-            }
-            try gap_highlighter.end(@truncate(u32, tree.tokens.len) - 1);
+        .root => unreachable,
+        .container_field,
+        .container_field_align,
+        .container_field_init,
+        => try writeContainerField(builder, arena, store, node, .field, child_frame),
+        .@"errdefer" => {
+            if (datas[node].lhs != 0)
+                try writeToken(builder, datas[node].lhs, .variable);
+
+            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
         },
         .block,
         .block_semicolon,
@@ -381,12 +381,21 @@
         .container_decl_two_trailing,
         .container_decl_arg,
         .container_decl_arg_trailing,
+        .tagged_union,
+        .tagged_union_trailing,
+        .tagged_union_enum_tag,
+        .tagged_union_enum_tag_trailing,
+        .tagged_union_two,
+        .tagged_union_two_trailing,
         => {
             var buf: [2]ast.Node.Index = undefined;
             const decl: ast.full.ContainerDecl = switch (tag) {
                 .container_decl, .container_decl_trailing => tree.containerDecl(node),
                 .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node),
                 .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node),
+                .tagged_union, .tagged_union_trailing => tree.taggedUnion(node),
+                .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node),
+                .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buf, node),
                 else => unreachable,
             };
 
@@ -632,7 +641,7 @@
                 .handle = handle,
             })) |struct_type| switch (struct_type.type.data) {
                 .other => |type_node| if (analysis.isContainer(struct_type.handle.tree.nodes.items(.tag)[type_node]))
-                    fieldTokenType(type_node, handle)
+                    fieldTokenType(type_node, struct_type.handle)
                 else
                     null,
                 else => null,
@@ -644,11 +653,9 @@
                 try gap_highlighter.next(field_init);
 
                 const init_token = tree.firstToken(field_init);
-                if (field_token_type) |tok_type| {
-                    try writeToken(builder, init_token - 3, tok_type);
-                    try writeToken(builder, init_token - 2, tok_type);
-                }
-                try writeToken(builder, init_token - 1, .operator);
+                try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.'
+                try writeToken(builder, init_token - 2, field_token_type orelse .field); // name
+                try writeToken(builder, init_token - 1, .operator); // '='
                 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init });
             }
             try gap_highlighter.end(tree.lastToken(node));
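The field-init arms rely on fixed token distances: in `.name = value`, the value's first token sits three tokens after the '.', so init_token - 3, - 2, and - 1 address the '.', the field name, and the '='. A quick tokenizer walk confirming that layout (a hypothetical snippet, era-appropriate std API):

    const std = @import("std");

    test "token layout of `.x = 42`" {
        var tokenizer = std.zig.Tokenizer.init(".x = 42");
        std.debug.assert(tokenizer.next().tag == .period); // init_token - 3
        std.debug.assert(tokenizer.next().tag == .identifier); // init_token - 2
        std.debug.assert(tokenizer.next().tag == .equal); // init_token - 1
    }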
@@ -712,7 +719,6 @@
         .grouped_expression => {
             try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
         },
-        .@"return",
         .@"break",
         .@"continue",
         => {
@@ -722,7 +728,7 @@
             if (datas[node].rhs != 0)
                 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
         },
-        .@"suspend" => {
+        .@"suspend", .@"return" => {
             try writeToken(builder, main_token, .keyword);
             if (datas[node].lhs != 0)
                 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
@@ -872,6 +878,8 @@
             if (data.rhs == 0) return;
             const rhs_str = tree.tokenSlice(data.rhs);
 
+            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs });
+
             // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
             // writeToken code.
             // Maybe we can hook into it insead? Also applies to Identifier and VarDecl
@@ -935,7 +943,7 @@
             });
         }
 
-        try writeToken(builder, main_token, .operator);
+        if (ptr_type.size == .One) try writeToken(builder, main_token, .operator);
         if (ptr_type.ast.sentinel != 0) {
             return try await @asyncCall(child_frame, {}, writeNodeTokens, .{
                 builder,
@@ -995,7 +1003,7 @@
             try writeToken(builder, main_token, .keyword);
             try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
         },
-        else => std.log.scoped(.semantic_tokens).debug("TODO: {s}", .{tag}),
+        .anyframe_literal => try writeToken(builder, main_token, .keyword),
     }
 }
 
@@ -1003,7 +1011,16 @@
 pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
     var builder = Builder.init(arena.child_allocator, handle, encoding);
 
+    // reverse the ast from the root declarations
+    var gap_highlighter = GapHighlighter.init(&builder, 0);
+    var buf: [2]ast.Node.Index = undefined;
+    for (analysis.declMembers(handle.tree, .root, 0, &buf)) |child| {
+        try gap_highlighter.next(child);
+        try writeNodeTokens(&builder, arena, store, child);
+    }
+
+    try gap_highlighter.end(@truncate(u32, handle.tree.tokens.len) - 1);
     // pass root node, which always has index '0'
-    try writeNodeTokens(&builder, arena, store, 0);
+    // try writeNodeTokens(&builder, arena, store, 0);
     return builder.toOwnedSlice();
 }