add completion for arrays and slices

Vexu 2020-05-17 17:23:04 +03:00
parent a2402b9885
commit 6e8165804f
2 changed files with 97 additions and 73 deletions
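For context (this sketch is not part of the commit): the new completion paths target member access on arrays, slices, and string literals, so typing `.` after values like the ones below should now offer `len` (and `ptr` where the new PrefixOp arm applies) instead of nothing.

    const std = @import("std");

    pub fn main() void {
        const array = [_]u8{ 1, 2, 3 }; // array type: handled by the new PrefixOp arm
        const slice: []const u8 = &array; // slice type: `len` and `ptr` are offered
        const text = "hello"; // string literal: `len` is offered

        // The members themselves are ordinary Zig; the commit only teaches the
        // language server to suggest them after a `.`.
        std.debug.print("{} {} {}\n", .{ array.len, slice.len, text.len });
    }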

First changed file:

@@ -12,7 +12,7 @@ pub fn getFunctionByName(tree: *ast.Tree, name: []const u8) ?*ast.Node.FnProto {
                 const func = decl.cast(ast.Node.FnProto).?;
                 if (std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return func;
             },
-            else => {}
+            else => {},
         }
     }
@@ -57,7 +57,7 @@ pub fn getDocComments(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast
                 return try collectDocComments(allocator, tree, doc_comments);
             }
         },
-        else => {}
+        else => {},
     }
     return null;
 }
@@ -102,8 +102,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func:
         const param = param_ptr.*;
         const param_decl = param.cast(ast.Node.ParamDecl).?;
-        if (param_num != 1) try buffer.appendSlice(", ${")
-        else try buffer.appendSlice("${");
+        if (param_num != 1) try buffer.appendSlice(", ${") else try buffer.appendSlice("${");

         try buf_stream.print("{}:", .{param_num});
@@ -135,7 +134,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func:
                 try buffer.appendSlice(tree.tokenSlice(curr_tok));
                 if (is_comma or id == .Keyword_const) try buffer.append(' ');
             }
-        }
+        },
     }

     try buffer.append('}');
@@ -159,6 +158,16 @@ pub fn getParamSignature(tree: *ast.Tree, param: *ast.Node.ParamDecl) []const u8
     return tree.source[start..end];
 }

+pub fn isTypeFunction(tree: *ast.Tree, func: *ast.Node.FnProto) bool {
+    switch (func.return_type) {
+        .Explicit => |node| return if (node.cast(std.zig.ast.Node.Identifier)) |ident|
+            std.mem.eql(u8, tree.tokenSlice(ident.token), "type")
+        else
+            false,
+        .InferErrorSet, .Invalid => return false,
+    }
+}
+
 // STYLE

 pub fn isCamelCase(name: []const u8) bool {
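A quick, made-up illustration of what the new isTypeFunction helper classifies: it returns true only when the declared return type is the literal identifier `type` (the usual generic-container pattern), and false for inferred error sets, invalid return types, or any other explicit type.

    // isTypeFunction(tree, func) -> true: the return type is the identifier `type`.
    fn List(comptime T: type) type {
        return struct { items: []T };
    }

    // isTypeFunction(tree, func) -> false: concrete return type (also false for
    // inferred error sets and invalid return types, per the switch above).
    fn length(items: []const u8) usize {
        return items.len;
    }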
@@ -188,7 +197,7 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node {
                 const field = child.cast(ast.Node.ContainerField).?;
                 if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child;
             },
-            else => {}
+            else => {},
         }
         index += 1;
     }
@@ -216,7 +225,7 @@ pub fn getChildOfSlice(tree: *ast.Tree, nodes: []*ast.Node, name: []const u8) ?*
                 const field = child.cast(ast.Node.ContainerField).?;
                 if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child;
             },
-            else => {}
+            else => {},
         }
         // index += 1;
     }
@@ -237,7 +246,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
                 .var_type, .type_expr => |var_type| {
                     return resolveTypeOfNode(analysis_ctx, var_type) orelse null;
                 },
-                else => {}
+                else => {},
             }
         },
         .FnProto => {
@@ -270,7 +279,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
                 .Call, .StructInitializer => {
                     return resolveTypeOfNode(analysis_ctx, suffix_op.lhs.node);
                 },
-                else => {}
+                else => {},
             }
         },
         .InfixOp => {
@@ -285,16 +294,22 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
                     const left = resolveTypeOfNode(analysis_ctx, infix_op.lhs) orelse return null;
                     return resolveTypeOfNode(analysis_ctx, getChild(analysis_ctx.tree, left, rhs_str) orelse return null);
                 },
-                else => {}
+                else => {},
             }
         },
         .PrefixOp => {
             const prefix_op = node.cast(ast.Node.PrefixOp).?;
             switch (prefix_op.op) {
+                .SliceType, .ArrayType => return node,
                 .PtrType => {
-                    return resolveTypeOfNode(analysis_ctx, prefix_op.rhs);
+                    const op_token = analysis_ctx.tree.tokens.at(prefix_op.op_token);
+                    switch (op_token.id) {
+                        .Asterisk => return resolveTypeOfNode(analysis_ctx, prefix_op.rhs),
+                        .LBracket, .AsteriskAsterisk => return null,
+                        else => unreachable,
+                    }
                 },
-                else => {}
+                else => {},
             }
         },
         .BuiltinCall => {
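Roughly what the new PrefixOp handling means for type resolution, shown on made-up declarations: slice and array type nodes now resolve to themselves, a single-item pointer resolves through to its pointee, and many-item or double pointers are skipped for now.

    const slice: []u8 = undefined; // .SliceType -> the type node itself (enables len/ptr completion)
    const array: [4]u8 = undefined; // .ArrayType -> the type node itself
    const one: *u8 = undefined; // .PtrType, op token `*` -> resolves through to u8
    const many: [*]u8 = undefined; // .PtrType, op token `[` -> null for now
    const double: **u8 = undefined; // .PtrType, op token `**` -> null for now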
@@ -307,13 +322,16 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             const import_str = analysis_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
             return analysis_ctx.onImport(import_str[1 .. import_str.len - 1]) catch |err| block: {
-                std.debug.warn("Error {} while processing import {}\n", .{err, import_str});
+                std.debug.warn("Error {} while processing import {}\n", .{ err, import_str });
                 break :block null;
             };
         },
+        .MultilineStringLiteral, .StringLiteral => {
+            return node;
+        },
         else => {
             std.debug.warn("Type resolution case not implemented; {}\n", .{node.id});
-        }
+        },
     }
     return null;
 }
@@ -341,7 +359,7 @@ pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const
         const var_decl = decl.cast(ast.Node.VarDecl).?;
         if (var_decl.init_node == null) continue;

-        switch(var_decl.init_node.?.id) {
+        switch (var_decl.init_node.?.id) {
             .BuiltinCall => {
                 const builtin_call = var_decl.init_node.?.cast(ast.Node.BuiltinCall).?;
                 try maybeCollectImport(tree, builtin_call, &arr);
@@ -349,7 +367,7 @@ pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const
             .InfixOp => {
                 const infix_op = var_decl.init_node.?.cast(ast.Node.InfixOp).?;

-                switch(infix_op.op) {
+                switch (infix_op.op) {
                     .Period => {},
                     else => continue,
                 }
@@ -395,7 +413,7 @@ pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zi
             },
             else => {
                 std.debug.warn("Not implemented; {}\n", .{next.id});
-            }
+            },
         }
     }
@@ -412,15 +430,8 @@ pub fn isNodePublic(tree: *ast.Tree, node: *ast.Node) bool {
             const func = node.cast(ast.Node.FnProto).?;
             return func.visib_token != null;
         },
-        .ContainerField, .ErrorTag => {
-            return true;
-        },
-        else => {
-            return false;
-        }
+        else => return true,
     }
-    return false;
 }

 pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 {
@@ -445,7 +456,7 @@ pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 {
         },
         else => {
             std.debug.warn("INVALID: {}\n", .{node.id});
-        }
+        },
     }

     return null;
@@ -479,7 +490,7 @@ pub fn declsFromIndexInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no
         },
         else => {
             try nodes.appendSlice(try getCompletionsFromNode(allocator, tree, node));
-        }
+        },
     }
 }

Second changed file:

@@ -142,13 +142,7 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
             if (func.name_token) |name_token| {
                 const loc = tree.tokenLocation(0, name_token);

-                const is_type_function = switch (func.return_type) {
-                    .Explicit => |node| if (node.cast(std.zig.ast.Node.Identifier)) |ident|
-                        std.mem.eql(u8, tree.tokenSlice(ident.token), "type")
-                    else
-                        false,
-                    .InferErrorSet, .Invalid => false,
-                };
+                const is_type_function = analysis.isTypeFunction(tree, func);

                 const func_name = tree.tokenSlice(name_token);
                 if (!is_type_function and !analysis.isCamelCase(func_name)) {
@@ -187,8 +181,17 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
     });
 }

-fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *std.zig.ast.Node, config: Config) !?types.CompletionItem {
-    var doc = if (try analysis.getDocComments(alloc, tree, decl)) |doc_comments|
+fn containerToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.ast.Tree, container: *std.zig.ast.Node, config: Config) !void {
+    var index: usize = 0;
+    while (container.iterate(index)) |child_node| : (index+=1) {
+        if (analysis.isNodePublic(tree, child_node)) {
+            try nodeToCompletion(list, tree, child_node, config);
+        }
+    }
+}
+
+fn nodeToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, config: Config) error{OutOfMemory}!void {
+    var doc = if (try analysis.getDocComments(list.allocator, tree, node)) |doc_comments|
         types.MarkupContent{
             .kind = .Markdown,
             .value = doc_comments,
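For intuition (the example container is made up): containerToCompletion walks a resolved container's children and forwards only the ones isNodePublic accepts to nodeToCompletion, so fields and pub functions are suggested while private helpers are filtered out.

    const Point = struct {
        x: f32, // ContainerField: treated as public, suggested
        y: f32,

        pub fn norm(self: Point) f32 { // pub fn: suggested
            return @sqrt(self.x * self.x + self.y * self.y);
        }

        fn helper() void {} // non-pub fn: filtered out by isNodePublic
    };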
@@ -196,54 +199,74 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *s
     else
         null;

-    switch (decl.id) {
+    switch (node.id) {
+        .ErrorSetDecl, .Root, .ContainerDecl => {
+            try containerToCompletion(list, tree, node, config);
+        },
         .FnProto => {
-            const func = decl.cast(std.zig.ast.Node.FnProto).?;
+            const func = node.cast(std.zig.ast.Node.FnProto).?;
             if (func.name_token) |name_token| {
                 const insert_text = if (config.enable_snippets)
-                    try analysis.getFunctionSnippet(alloc, tree, func)
+                    try analysis.getFunctionSnippet(list.allocator, tree, func)
                 else
                     null;
-                return types.CompletionItem{
+                const is_type_function = analysis.isTypeFunction(tree, func);
+                try list.append(.{
                     .label = tree.tokenSlice(name_token),
-                    .kind = .Function,
+                    .kind = if (is_type_function) .Struct else .Function,
                     .documentation = doc,
                     .detail = analysis.getFunctionSignature(tree, func),
                     .insertText = insert_text,
                     .insertTextFormat = if (config.enable_snippets) .Snippet else .PlainText,
-                };
+                });
             }
         },
         .VarDecl => {
-            const var_decl = decl.cast(std.zig.ast.Node.VarDecl).?;
-            return types.CompletionItem{
+            const var_decl = node.cast(std.zig.ast.Node.VarDecl).?;
+            const is_const = tree.tokens.at(var_decl.mut_token).id == .Keyword_const;
+            try list.append(.{
                 .label = tree.tokenSlice(var_decl.name_token),
-                .kind = .Variable,
+                .kind = if (is_const) .Constant else .Variable,
                 .documentation = doc,
                 .detail = analysis.getVariableSignature(tree, var_decl),
-            };
+            });
         },
         .ParamDecl => {
-            const param = decl.cast(std.zig.ast.Node.ParamDecl).?;
+            const param = node.cast(std.zig.ast.Node.ParamDecl).?;
             if (param.name_token) |name_token|
-                return types.CompletionItem{
+                try list.append(.{
                     .label = tree.tokenSlice(name_token),
-                    .kind = .Variable,
+                    .kind = .Constant,
                     .documentation = doc,
                     .detail = analysis.getParamSignature(tree, param),
-                };
+                });
         },
-        else => if (analysis.nodeToString(tree, decl)) |string| {
-            return types.CompletionItem{
+        .PrefixOp => {
+            try list.append(.{
+                .label = "len",
+                .kind = .Field,
+            });
+            try list.append(.{
+                .label = "ptr",
+                .kind = .Field,
+            });
+        },
+        .StringLiteral => {
+            try list.append(.{
+                .label = "len",
+                .kind = .Field,
+            });
+        },
+        else => if (analysis.nodeToString(tree, node)) |string| {
+            try list.append(.{
                 .label = string,
                 .kind = .Field,
                 .documentation = doc,
-            };
+            });
         },
     }
-
-    return null;
 }

 fn completeGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle, config: Config) !void {
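The kind changes in the hunk above also affect how items render in the client; a small made-up mapping consistent with the new switch: const declarations and parameters become .Constant, functions returning `type` become .Struct, and the rest keep their previous kinds.

    const limit = 10; // const VarDecl -> .Constant (was .Variable)
    var counter: u32 = 0; // var VarDecl -> .Variable, as before
    fn Matrix(comptime T: type) type { // type function -> .Struct (was .Function)
        return [4][4]T;
    }
    fn reset(amount: u32) void { // `amount` as a ParamDecl -> .Constant (was .Variable)
        counter = amount;
    }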
@@ -260,9 +283,7 @@ fn completeGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle, confi
     var decls = try analysis.declsFromIndex(&arena.allocator, tree, pos_index);
     for (decls) |decl_ptr| {
         var decl = decl_ptr.*;
-        if (try nodeToCompletion(&arena.allocator, tree, decl_ptr, config)) |completion| {
-            try completions.append(completion);
-        }
+        try nodeToCompletion(&completions, tree, decl_ptr, config);
     }

     try send(types.Response{
@@ -290,15 +311,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
     // var decls = try analysis.declsFromIndex(&arena.allocator, analysis_ctx.tree, try handle.document.positionToIndex(position));
     if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer)) |node| {
-        var index: usize = 0;
-        while (node.iterate(index)) |child_node| {
-            if (analysis.isNodePublic(analysis_ctx.tree, child_node)) {
-                if (try nodeToCompletion(&arena.allocator, analysis_ctx.tree, child_node, config)) |completion| {
-                    try completions.append(completion);
-                }
-            }
-            index += 1;
-        }
+        try nodeToCompletion(&completions, analysis_ctx.tree, node, config);
     }

     try send(types.Response{