Always send an insertText field in completion items

parent c063ca4c60
commit 48b5ca5385
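
Summary: every completion item the server builds now sets an explicit insertText (plain text, or the generated function snippet for functions) alongside label and kind, so LSP clients that insert insertText rather than label get the right text. Below is a minimal, self-contained sketch of the item shape the diff converges on; the types are illustrative stand-ins, not zls's actual definitions.

    const std = @import("std");

    // Illustrative stand-ins for the LSP completion types used in the diff;
    // this is only a sketch of the item shape, not zls's real declarations.
    const InsertTextFormat = enum { PlainText, Snippet };

    const CompletionItem = struct {
        label: []const u8,
        kind: enum { Field, Operator, Variable, Constant },
        insertText: ?[]const u8 = null,
        insertTextFormat: ?InsertTextFormat = null,
    };

    pub fn main() void {
        // Before this commit an item like "len" carried only a label and kind;
        // now insertText and a PlainText insertTextFormat are always filled in,
        // so editors that insert `insertText` instead of `label` still work.
        const item = CompletionItem{
            .label = "len",
            .kind = .Field,
            .insertText = "len",
            .insertTextFormat = .PlainText,
        };
        std.debug.print("label={s} insertText={s}\n", .{ item.label, item.insertText.? });
    }
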
@@ -111,7 +111,7 @@ pub fn getFunctionSnippet(
     func: ast.full.FnProto,
     skip_self_param: bool,
 ) ![]const u8 {
-    const name_index = func.name_token orelse unreachable;
+    const name_index = func.name_token.?;
 
     var buffer = std.ArrayList(u8).init(allocator);
     try buffer.ensureCapacity(128);
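
Note on the change above: `func.name_token.?` is Zig's shorthand for `func.name_token orelse unreachable`, so the new line behaves exactly like the old one. A standalone sketch of that equivalence, with a made-up optional value:

    const std = @import("std");

    pub fn main() void {
        // Hypothetical optional token index, only to show the unwrap equivalence.
        const name_token: ?u32 = 42;

        // `orelse unreachable` (old code) and `.?` (new code) are equivalent:
        // both assume the optional holds a value and trip a safety check if not.
        const old_style = name_token orelse unreachable;
        const new_style = name_token.?;

        std.debug.print("{d} {d}\n", .{ old_style, new_style });
    }
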
@@ -2601,7 +2601,7 @@ fn makeScopeInternal(
 
     if (isContainer(tree, node_idx)) {
         var buf: [2]ast.Node.Index = undefined;
-        const ast_decls = declMembers(tree,node_idx, &buf);
+        const ast_decls = declMembers(tree, node_idx, &buf);
 
         (try scopes.addOne(allocator)).* = .{
             .range = nodeSourceRange(tree, node_idx),
@@ -2638,10 +2638,10 @@ fn makeScopeInternal(
                 (try error_completions.addOne(allocator)).* = .{
                     .label = name,
                     .kind = .Constant,
-                    .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs|
-                        .{ .kind = .Markdown, .value = docs }
-                    else
-                        null,
+                    .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs| .{
+                        .kind = .Markdown,
+                        .value = docs,
+                    } else null,
                 };
             }
 
@@ -2676,7 +2676,9 @@ fn makeScopeInternal(
 
             if (container_decl) |container| {
                 const kind = token_tags[container.ast.main_token];
-                if (empty_field and (kind == .keyword_struct or (kind == .keyword_union and container.ast.arg == 0))) {
+                if (empty_field and
+                    (kind == .keyword_struct or (kind == .keyword_union and container.ast.arg == 0)))
+                {
                     continue;
                 }
 
@@ -2684,10 +2686,10 @@ fn makeScopeInternal(
                 (try enum_completions.addOne(allocator)).* = .{
                     .label = name,
                     .kind = .Constant,
-                    .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs|
-                        .{ .kind = .Markdown, .value = docs }
-                    else
-                        null,
+                    .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs| .{
+                        .kind = .Markdown,
+                        .value = docs,
+                    } else null,
                 };
             }
         }
@@ -2926,20 +2928,17 @@ fn makeScopeInternal(
             std.debug.assert(token_tags[name_token] == .identifier);
 
             const name = tree.tokenSlice(name_token);
-            try scope.decls.putNoClobber(name, if (is_for)
-                .{
-                    .array_payload = .{
-                        .identifier = name_token,
-                        .array_expr = while_node.ast.cond_expr,
-                    },
-                }
-            else
-                .{
-                    .pointer_payload = .{
-                        .name = name_token,
-                        .condition = while_node.ast.cond_expr,
-                    },
-                });
+            try scope.decls.putNoClobber(name, if (is_for) .{
+                .array_payload = .{
+                    .identifier = name_token,
+                    .array_expr = while_node.ast.cond_expr,
+                },
+            } else .{
+                .pointer_payload = .{
+                    .name = name_token,
+                    .condition = while_node.ast.cond_expr,
+                },
+            });
 
             // for loop with index as well
             if (token_tags[name_token + 1] == .comma) {

src/main.zig (68 changed lines)
@@ -297,10 +297,14 @@ fn typeToCompletion(
             try list.append(.{
                 .label = "len",
                 .kind = .Field,
+                .insertText = "len",
+                .insertTextFormat = .PlainText,
             });
             try list.append(.{
                 .label = "ptr",
                 .kind = .Field,
+                .insertText = "ptr",
+                .insertTextFormat = .PlainText,
             });
         }
     },
@@ -310,6 +314,8 @@ fn typeToCompletion(
         try list.append(.{
             .label = "*",
             .kind = .Operator,
+            .insertText = "*",
+            .insertTextFormat = .PlainText,
         });
     }
     try nodeToCompletion(
@@ -425,7 +431,7 @@ fn nodeToCompletion(
             } else false;
 
             break :blk try analysis.getFunctionSnippet(&arena.allocator, tree, func, skip_self_param);
-        } else "";
+        } else tree.tokenSlice(func.name_token.?);
 
         const is_type_function = analysis.isTypeFunction(handle.tree, func);
 
@@ -462,6 +468,8 @@ fn nodeToCompletion(
                 .kind = if (is_const) .Constant else .Variable,
                 .documentation = doc,
                 .detail = analysis.getVariableSignature(tree, var_decl),
+                .insertText = tree.tokenSlice(var_decl.ast.mut_token + 1),
+                .insertTextFormat = .PlainText,
             });
         },
         .container_field,
@@ -474,6 +482,8 @@ fn nodeToCompletion(
                 .kind = .Field,
                 .documentation = doc,
                 .detail = analysis.getContainerFieldSignature(handle.tree, field),
+                .insertText = tree.tokenSlice(field.ast.name_token),
+                .insertTextFormat = .PlainText,
             });
         },
         .array_type,
@@ -482,6 +492,8 @@ fn nodeToCompletion(
             try list.append(.{
                 .label = "len",
                 .kind = .Field,
+                .insertText = "len",
+                .insertTextFormat = .PlainText,
             });
         },
         .ptr_type,
@@ -496,11 +508,23 @@ fn nodeToCompletion(
             try list.append(.{
                 .label = "*",
                 .kind = .Operator,
+                .insertText = "*",
+                .insertTextFormat = .PlainText,
             });
         },
         .Slice => {
-            try list.append(.{ .label = "ptr", .kind = .Field });
-            try list.append(.{ .label = "len", .kind = .Field });
+            try list.append(.{
+                .label = "ptr",
+                .kind = .Field,
+                .insertText = "ptr",
+                .insertTextFormat = .PlainText,
+            });
+            try list.append(.{
+                .label = "len",
+                .kind = .Field,
+                .insertText = "len",
+                .insertTextFormat = .PlainText,
+            });
             return;
         },
     }
@@ -515,6 +539,8 @@ fn nodeToCompletion(
         try list.append(.{
             .label = "?",
             .kind = .Operator,
+            .insertText = "?",
+            .insertTextFormat = .PlainText,
         });
     }
     return;
@@ -523,6 +549,8 @@ fn nodeToCompletion(
             try list.append(.{
                 .label = "len",
                 .kind = .Field,
+                .insertText = "len",
+                .insertTextFormat = .PlainText,
             });
         },
         else => if (analysis.nodeToString(tree, node)) |string| {
@@ -531,6 +559,8 @@ fn nodeToCompletion(
                 .kind = .Field,
                 .documentation = doc,
                 .detail = tree.getNodeSource(node),
+                .insertText = string,
+                .insertTextFormat = .PlainText,
             });
         },
     }
@@ -963,36 +993,48 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
                 .kind = .Constant,
                 .documentation = doc,
                 .detail = tree.source[offsets.tokenLocation(tree, first_token).start..offsets.tokenLocation(tree, last_token).end],
+                .insertText = tree.tokenSlice(param.name_token.?),
+                .insertTextFormat = .PlainText,
             });
         },
         .pointer_payload => |payload| {
             try context.completions.append(.{
                 .label = tree.tokenSlice(payload.name),
                 .kind = .Variable,
+                .insertText = tree.tokenSlice(payload.name),
+                .insertTextFormat = .PlainText,
             });
         },
         .array_payload => |payload| {
             try context.completions.append(.{
                 .label = tree.tokenSlice(payload.identifier),
                 .kind = .Variable,
+                .insertText = tree.tokenSlice(payload.identifier),
+                .insertTextFormat = .PlainText,
             });
         },
         .array_index => |payload| {
             try context.completions.append(.{
                 .label = tree.tokenSlice(payload),
                 .kind = .Variable,
+                .insertText = tree.tokenSlice(payload),
+                .insertTextFormat = .PlainText,
             });
         },
         .switch_payload => |payload| {
             try context.completions.append(.{
                 .label = tree.tokenSlice(payload.node),
                 .kind = .Variable,
+                .insertText = tree.tokenSlice(payload.node),
+                .insertTextFormat = .PlainText,
             });
         },
         .label_decl => |label_decl| {
             try context.completions.append(.{
                 .label = tree.tokenSlice(label_decl),
                 .kind = .Variable,
+                .insertText = tree.tokenSlice(label_decl),
+                .insertTextFormat = .PlainText,
             });
         },
     }
@@ -1044,15 +1086,15 @@ fn completeBuiltin(arena: *std.heap.ArenaAllocator, id: types.RequestId, config:
             } else {
                 insert_text = builtin.name;
             }
             builtin_completions.?[idx].insertText =
                 if (config.include_at_in_builtins)
                     insert_text
                 else
                     insert_text[1..];
         }
         truncateCompletions(builtin_completions.?, config.max_detail_length);
     }
 
     try send(arena, types.Response{
         .id = id,
         .result = .{
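
The completeBuiltin context above shows how builtin insert text is chosen: the stored text begins with '@', and when config.include_at_in_builtins is disabled the leading '@' is dropped by slicing from index 1. A standalone sketch of that slicing, with illustrative values:

    const std = @import("std");

    pub fn main() void {
        // Builtin names are stored with their leading '@' (e.g. "@import").
        const insert_text: []const u8 = "@import";

        // Stand-in for config.include_at_in_builtins; when false, the leading
        // '@' is stripped from the text that gets inserted.
        const include_at_in_builtins = false;

        const final_text = if (include_at_in_builtins) insert_text else insert_text[1..];
        std.debug.print("{s}\n", .{final_text}); // prints "import"
    }
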
@@ -1115,9 +1157,9 @@ fn completeFieldAccess(
 
 fn completeError(
     arena: *std.heap.ArenaAllocator,
     id: types.RequestId,
     handle: *DocumentStore.Handle,
-    config: Config
+    config: Config,
 ) !void {
     const completions = try document_store.errorCompletionItems(arena, handle);
     truncateCompletions(completions, config.max_detail_length);
@@ -1135,13 +1177,13 @@ fn completeError(
 
 fn completeDot(
     arena: *std.heap.ArenaAllocator,
     id: types.RequestId,
     handle: *DocumentStore.Handle,
-    config: Config
+    config: Config,
 ) !void {
     var completions = try document_store.enumCompletionItems(arena, handle);
     truncateCompletions(completions, config.max_detail_length);
 
     try send(arena, types.Response{
         .id = id,
         .result = .{