Merge pull request #23 from Vexu/err

Implement completion for error sets
Auguste Rame 2020-05-14 13:46:15 -04:00 committed by GitHub
commit 8468207da3
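
For context (not part of the diff): this change teaches zls to complete the members of an error set and to collect their doc comments. A minimal sketch of the kind of declaration it targets, with invented names, written in the same May-2020 Zig style the diff itself uses:

    /// Errors that config loading can return.
    const ConfigError = error{
        /// No file exists at the given path.
        FileNotFound,
        /// The file exists but could not be parsed.
        InvalidSyntax,
    };

After this commit, completing after "ConfigError." should be able to list FileNotFound and InvalidSyntax, with each tag's doc comment available as accompanying documentation text.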


@@ -30,37 +30,48 @@ pub fn getDocComments(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast
         .FnProto => {
             const func = node.cast(ast.Node.FnProto).?;
             if (func.doc_comments) |doc_comments| {
-                var doc_it = doc_comments.lines.iterator(0);
-                var lines = std.ArrayList([]const u8).init(allocator);
-                defer lines.deinit();
-
-                while (doc_it.next()) |doc_comment| {
-                    _ = try lines.append(std.fmt.trim(tree.tokenSlice(doc_comment.*)[3..]));
-                }
-
-                return try std.mem.join(allocator, "\n", lines.items);
-            } else {
-                return null;
+                return try collectDocComments(allocator, tree, doc_comments);
             }
         },
         .VarDecl => {
             const var_decl = node.cast(ast.Node.VarDecl).?;
             if (var_decl.doc_comments) |doc_comments| {
-                var doc_it = doc_comments.lines.iterator(0);
-                var lines = std.ArrayList([]const u8).init(allocator);
-                defer lines.deinit();
-
-                while (doc_it.next()) |doc_comment| {
-                    _ = try lines.append(std.fmt.trim(tree.tokenSlice(doc_comment.*)[3..]));
-                }
-
-                return try std.mem.join(allocator, "\n", lines.items);
-            } else {
-                return null;
+                return try collectDocComments(allocator, tree, doc_comments);
             }
         },
-        else => return null
+        .ContainerField => {
+            const field = node.cast(ast.Node.ContainerField).?;
+            if (field.doc_comments) |doc_comments| {
+                return try collectDocComments(allocator, tree, doc_comments);
+            }
+        },
+        .ErrorTag => {
+            const tag = node.cast(ast.Node.ErrorTag).?;
+            if (tag.doc_comments) |doc_comments| {
+                return try collectDocComments(allocator, tree, doc_comments);
+            }
+        },
+        .ParamDecl => {
+            const param = node.cast(ast.Node.ParamDecl).?;
+            if (param.doc_comments) |doc_comments| {
+                return try collectDocComments(allocator, tree, doc_comments);
+            }
+        },
+        else => {}
     }
+    return null;
+}
+
+fn collectDocComments(allocator: *std.mem.Allocator, tree: *ast.Tree, doc_comments: *ast.Node.DocComment) ![]const u8 {
+    var doc_it = doc_comments.lines.iterator(0);
+    var lines = std.ArrayList([]const u8).init(allocator);
+    defer lines.deinit();
+
+    while (doc_it.next()) |doc_comment| {
+        _ = try lines.append(std.fmt.trim(tree.tokenSlice(doc_comment.*)[3..]));
+    }
+
+    return try std.mem.join(allocator, "\n", lines.items);
 }
 
 /// Gets a function signature (keywords, name, return value)
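
Note on the hunk above (not part of the diff): collectDocComments centralizes the doc-comment handling previously duplicated in the .FnProto and .VarDecl branches, and the new .ContainerField, .ErrorTag, and .ParamDecl branches reuse it. Each "///" token is sliced past its first three characters, trimmed, and the lines are joined with "\n". A standalone sketch of that trim-and-join step (illustration only, not zls code; it assumes the same-era std APIs the diff itself uses, such as std.fmt.trim and *std.mem.Allocator):

    const std = @import("std");

    // Mimics what collectDocComments does to the raw "///" token text.
    fn joinDocLines(allocator: *std.mem.Allocator, raw: []const []const u8) ![]const u8 {
        var lines = std.ArrayList([]const u8).init(allocator);
        defer lines.deinit();
        for (raw) |line| {
            // Drop the leading "///" and trim surrounding whitespace.
            try lines.append(std.fmt.trim(line[3..]));
        }
        return try std.mem.join(allocator, "\n", lines.items);
    }

    test "join doc comment lines" {
        const raw = [_][]const u8{
            "/// Returns the sum of a and b.",
            "/// Asserts the result does not overflow.",
        };
        const joined = try joinDocLines(std.testing.allocator, raw[0..]);
        defer std.testing.allocator.free(joined);
        std.testing.expectEqualSlices(
            u8,
            "Returns the sum of a and b.\nAsserts the result does not overflow.",
            joined,
        );
    }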
@@ -203,6 +214,9 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             const field = node.cast(ast.Node.ContainerField).?;
             return resolveTypeOfNode(analysis_ctx, field.type_expr orelse return null);
         },
+        .ErrorSetDecl => {
+            return node;
+        },
         .SuffixOp => {
             const suffix_op = node.cast(ast.Node.SuffixOp).?;
             switch (suffix_op.op) {
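
Note on the hunk above (not part of the diff): returning the ErrorSetDecl node from resolveTypeOfNode means an error set is treated as its own type, so a name bound to one can be resolved back to the declaration that lists its tags. A hedged illustration with invented names:

    const ReadError = error{ EndOfStream, Unexpected };
    // Resolving the type of `ReadError` should now land on the ErrorSetDecl
    // node, so completion after "ReadError." can enumerate EndOfStream and
    // Unexpected from that declaration.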
@@ -372,6 +386,10 @@ pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 {
             const field = node.cast(ast.Node.ContainerField).?;
             return tree.tokenSlice(field.name_token);
         },
+        .ErrorTag => {
+            const tag = node.cast(ast.Node.ErrorTag).?;
+            return tree.tokenSlice(tag.name_token);
+        },
         .Identifier => {
             const field = node.cast(ast.Node.Identifier).?;
             return tree.tokenSlice(field.token);
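
Note on the hunk above (not part of the diff): nodeToString now labels an error tag by the slice of its name token, which is what a completion item for that tag would display. A small invented example:

    const E = error{
        /// The peer closed the connection.
        ConnectionReset,
    };
    // For the ErrorTag node of ConnectionReset, nodeToString should return
    // "ConnectionReset", and getDocComments (first hunk) should return
    // "The peer closed the connection." for the same node.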