Merge pull request #1212 from Techatrix/param-slice

add `paramSlice` helper function
Lee Cannon, 2023-05-30 20:56:21 +01:00 (committed by GitHub)
commit f06c75157c
4 changed files with 13 additions and 36 deletions


@@ -991,6 +991,15 @@ pub fn paramLastToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex {
     return param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
 }
 
+pub fn paramSlice(tree: Ast, param: Ast.full.FnProto.Param) []const u8 {
+    const first_token = paramFirstToken(tree, param);
+    const last_token = paramLastToken(tree, param);
+
+    const start = offsets.tokenToIndex(tree, first_token);
+    const end = offsets.tokenToLoc(tree, last_token).end;
+    return tree.source[start..end];
+}
+
 pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
     return switch (tree.nodes.items(.tag)[node]) {
         .container_decl,
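The new helper (evidently ZLS's ast.zig, given the `@import("../ast.zig")` call sites in the other three files) reuses paramFirstToken/paramLastToken to slice a parameter's full source text. Below is a minimal standalone sketch of the same computation using only std.zig.Ast from the Zig 0.11-era standard library; paramSliceSketch, its inlined orelse chains, and the raw token arithmetic are illustrative stand-ins for paramFirstToken, paramLastToken, and the offsets module, not code from this PR:

const std = @import("std");
const Ast = std.zig.Ast;

// Sketch of paramSlice outside ZLS: slice the source between the start of the
// parameter's first token and the end of its last token. The orelse chains
// mirror what paramFirstToken/paramLastToken presumably resolve to.
fn paramSliceSketch(tree: Ast, param: Ast.full.FnProto.Param) []const u8 {
    // first token: `comptime`/`noalias` if present, else the name, else the type expression
    const first_token = param.comptime_noalias orelse
        param.name_token orelse
        tree.firstToken(param.type_expr);
    // last token: `anytype`/`...` if present, else the end of the type expression
    const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);

    const start = tree.tokens.items(.start)[first_token];
    const end = tree.tokens.items(.start)[last_token] + tree.tokenSlice(last_token).len;
    return tree.source[start..end];
}

test "paramSliceSketch" {
    const source: [:0]const u8 = "fn add(comptime a: i32, b: anytype) i32 { return a + b; }";
    var tree = try Ast.parse(std.testing.allocator, source, .zig);
    defer tree.deinit(std.testing.allocator);

    var buf: [1]Ast.Node.Index = undefined;
    const proto = tree.fullFnProto(&buf, tree.rootDecls()[0]).?;

    var it = proto.iterate(&tree);
    try std.testing.expectEqualStrings("comptime a: i32", paramSliceSketch(tree, it.next().?));
    try std.testing.expectEqualStrings("b: anytype", paramSliceSketch(tree, it.next().?));
}

The three files below then replace their hand-rolled versions of this span computation with calls to the shared helper.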


@@ -370,14 +370,11 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: Analyser.Decl
                 try Analyser.collectDocComments(allocator, tree, doc_comments, doc_kind, false),
             } } else null;
 
-            const first_token = ast.paramFirstToken(tree, param);
-            const last_token = ast.paramLastToken(tree, param);
-
             try context.completions.append(allocator, .{
                 .label = tree.tokenSlice(param.name_token.?),
                 .kind = .Constant,
                 .documentation = doc,
-                .detail = tree.source[offsets.tokenToIndex(tree, first_token)..offsets.tokenToLoc(tree, last_token).end],
+                .detail = ast.paramSlice(tree, param),
                 .insertText = tree.tokenSlice(param.name_token.?),
                 .insertTextFormat = .PlainText,
             });


@@ -48,12 +48,7 @@ pub fn hoverSymbol(server: *Server, decl_handle: Analyser.DeclWithHandle, markup
                 doc_str = try Analyser.collectDocComments(server.arena.allocator(), handle.tree, doc_comments, markup_kind, false);
             }
 
-            const first_token = ast.paramFirstToken(tree, param);
-            const last_token = ast.paramLastToken(tree, param);
-
-            const start = offsets.tokenToIndex(tree, first_token);
-            const end = offsets.tokenToLoc(tree, last_token).end;
-            break :def tree.source[start..end];
+            break :def ast.paramSlice(tree, param);
         },
         .pointer_payload,
         .array_payload,


@@ -7,12 +7,12 @@ const DocumentStore = @import("../DocumentStore.zig");
 const types = @import("../lsp.zig");
 const Server = @import("../Server.zig");
 const ast = @import("../ast.zig");
+const offsets = @import("../offsets.zig");
 const data = @import("../data/data.zig");
 
 fn fnProtoToSignatureInfo(analyser: *Analyser, alloc: std.mem.Allocator, commas: u32, skip_self_param: bool, handle: *const DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation {
     const tree = handle.tree;
-    const token_starts = tree.tokens.items(.start);
     const label = Analyser.getFunctionSignature(tree, proto);
     const proto_comments = (try Analyser.getDocComments(alloc, tree, fn_node, .markdown)) orelse "";
@@ -29,32 +29,8 @@ fn fnProtoToSignatureInfo(analyser: *Analyser, alloc: std.mem.Allocator, commas:
         else
             "";
 
-        var param_label_start: usize = 0;
-        var param_label_end: usize = 0;
-        if (param.comptime_noalias) |cn| {
-            param_label_start = token_starts[cn];
-            param_label_end = param_label_start + tree.tokenSlice(cn).len;
-        }
-        if (param.name_token) |nt| {
-            if (param_label_start == 0)
-                param_label_start = token_starts[nt];
-            param_label_end = token_starts[nt] + tree.tokenSlice(nt).len;
-        }
-        if (param.anytype_ellipsis3) |ae| {
-            if (param_label_start == 0)
-                param_label_start = token_starts[ae];
-            param_label_end = token_starts[ae] + tree.tokenSlice(ae).len;
-        }
-        if (param.type_expr != 0) {
-            if (param_label_start == 0)
-                param_label_start = token_starts[tree.firstToken(param.type_expr)];
-            const last_param_tok = ast.lastToken(tree, param.type_expr);
-            param_label_end = token_starts[last_param_tok] + tree.tokenSlice(last_param_tok).len;
-        }
-        const param_label = tree.source[param_label_start..param_label_end];
-
         try params.append(alloc, .{
-            .label = .{ .string = param_label },
+            .label = .{ .string = ast.paramSlice(tree, param) },
             .documentation = .{ .MarkupContent = .{
                 .kind = .markdown,
                 .value = param_comments,
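The deleted block in this last hunk and the new call should produce the same label text: the old code walked comptime/noalias, the name, anytype/`...`, and the type expression by hand, tracking the span with token_starts and a param_label_start == 0 sentinel, which is exactly the first-token-to-last-token span that ast.paramSlice slices (assuming paramFirstToken falls back through comptime/noalias, then name, then type expression, matching the orelse style of paramLastToken shown in the first file).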