Merge pull request #630 from Techatrix/master
Reconstruct diagnostic range end of ast-gen
commit aa81d83136
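
The change at a glance (annotation, not part of the commit): the astLocationToRange helper removed below collapsed both ends of a diagnostic's range onto the token start, so clients highlighted a zero-width span; the commit reconstructs the range end from the token's length instead. A minimal self-contained sketch of the difference, where Position and Range are stand-ins for zls's types.zig and the two-argument @intCast matches the Zig version this diff targets:

const Position = struct { line: i64, character: i64 };
const Range = struct { start: Position, end: Position };

// Before: end == start, a zero-width range at the token start.
fn zeroWidthRange(line: usize, column: usize) Range {
    const p = Position{ .line = @intCast(i64, line), .character = @intCast(i64, column) };
    return .{ .start = p, .end = p };
}

// After: the end is start plus the token's length, measured in the
// client's offset encoding, so the whole token gets highlighted.
fn reconstructedRange(line: usize, column: usize, token_len: usize) Range {
    return .{
        .start = .{ .line = @intCast(i64, line), .character = @intCast(i64, column) },
        .end = .{ .line = @intCast(i64, line), .character = @intCast(i64, column + token_len) },
    };
}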

src/Server.zig

@@ -197,20 +197,6 @@ fn showMessage(server: *Server, writer: anytype, message_type: types.MessageType
     });
 }
 
-// TODO: Is this correct or can we get a better end?
-fn astLocationToRange(loc: Ast.Location) types.Range {
-    return .{
-        .start = .{
-            .line = @intCast(i64, loc.line),
-            .character = @intCast(i64, loc.column),
-        },
-        .end = .{
-            .line = @intCast(i64, loc.line),
-            .character = @intCast(i64, loc.column),
-        },
-    };
-}
-
 fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle) !void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
@@ -221,14 +207,12 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
     var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){};
 
     for (tree.errors) |err| {
-        const loc = tree.tokenLocation(0, err.token);
-
         var mem_buffer: [256]u8 = undefined;
         var fbs = std.io.fixedBufferStream(&mem_buffer);
         try tree.renderError(err, fbs.writer());
 
         try diagnostics.append(allocator, .{
-            .range = astLocationToRange(loc),
+            .range = offsets.tokenToRange(tree, err.token, server.offset_encoding) catch continue,
             .severity = .Error,
             .code = @tagName(err.tag),
             .source = "zls",
@@ -267,16 +251,18 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
                             if (first.len <= 1) break :lin;
                         } else break;
 
-                        const pos = types.Position{
+                        const position = types.Position{
                             .line = (try std.fmt.parseInt(i64, pos_and_diag_iterator.next().?, 10)) - 1,
                             .character = (try std.fmt.parseInt(i64, pos_and_diag_iterator.next().?, 10)) - 1,
                         };
 
+                        const range = try offsets.tokenPositionToRange(tree, position, server.offset_encoding);
+
                         const msg = pos_and_diag_iterator.rest()[1..];
 
                         if (std.mem.startsWith(u8, msg, "error: ")) {
                             try diagnostics.append(allocator, .{
-                                .range = .{ .start = pos, .end = pos },
+                                .range = range,
                                 .severity = .Error,
                                 .code = "ast_check",
                                 .source = "zls",
@@ -292,7 +278,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
 
                             const location = types.Location{
                                 .uri = handle.uri(),
-                                .range = .{ .start = pos, .end = pos },
+                                .range = range,
                             };
 
                             fresh[fresh.len - 1] = .{
@@ -303,7 +289,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
                             latestDiag.relatedInformation = fresh;
                         } else {
                             try diagnostics.append(allocator, .{
-                                .range = .{ .start = pos, .end = pos },
+                                .range = range,
                                 .severity = .Error,
                                 .code = "ast_check",
                                 .source = "zls",
@@ -336,7 +322,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
 
                 if (std.mem.startsWith(u8, import_str, "\"./")) {
                     try diagnostics.append(allocator, .{
-                        .range = astLocationToRange(tree.tokenLocation(0, import_str_token)),
+                        .range = offsets.tokenToRange(tree, import_str_token, server.offset_encoding) catch continue,
                         .severity = .Hint,
                         .code = "dot_slash_import",
                         .source = "zls",
@@ -362,14 +348,12 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
                         if (func.extern_export_inline_token != null) break :blk;
 
                         if (func.name_token) |name_token| {
-                            const loc = tree.tokenLocation(0, name_token);
-
                             const is_type_function = analysis.isTypeFunction(tree, func);
 
                             const func_name = tree.tokenSlice(name_token);
                             if (!is_type_function and !analysis.isCamelCase(func_name)) {
                                 try diagnostics.append(allocator, .{
-                                    .range = astLocationToRange(loc),
+                                    .range = offsets.tokenToRange(tree, name_token, server.offset_encoding) catch continue,
                                     .severity = .Hint,
                                     .code = "bad_style",
                                     .source = "zls",
@@ -377,7 +361,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
                                 });
                             } else if (is_type_function and !analysis.isPascalCase(func_name)) {
                                 try diagnostics.append(allocator, .{
-                                    .range = astLocationToRange(loc),
+                                    .range = offsets.tokenToRange(tree, name_token, server.offset_encoding) catch continue,
                                     .severity = .Hint,
                                     .code = "bad_style",
                                     .source = "zls",
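
Annotation on the -267 hunk above (not part of the commit): the positions parsed out of `zig ast-check` stderr are 1-based line:column pairs, while LSP positions are 0-based, hence the `- 1` on both fields; the exact stderr shape (e.g. `foo.zig:3:5: error: ...`) is my assumption about ast-check's output. A self-contained sketch of that conversion, with Position again a stand-in:

const std = @import("std");

const Position = struct { line: i64, character: i64 };

// Mirrors the parseInt lines in the hunk above: 1-based -> 0-based.
fn parseAstCheckPosition(line_str: []const u8, col_str: []const u8) !Position {
    return Position{
        .line = (try std.fmt.parseInt(i64, line_str, 10)) - 1,
        .character = (try std.fmt.parseInt(i64, col_str, 10)) - 1,
    };
}

test "3:5 becomes line 2, character 4" {
    const p = try parseAstCheckPosition("3", "5");
    try std.testing.expectEqual(@as(i64, 2), p.line);
    try std.testing.expectEqual(@as(i64, 4), p.character);
}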

src/offsets.zig

@@ -142,13 +142,22 @@ pub fn tokenRelativeLocation(tree: Ast, start_index: usize, token_start: usize,
 
 /// Asserts the token is comprised of valid utf8
 pub fn tokenLength(tree: Ast, token: Ast.TokenIndex, encoding: Encoding) usize {
-    const token_loc = tokenLocation(tree, token);
-    if (encoding == .utf8)
-        return token_loc.end - token_loc.start;
+    return locationLength(tokenLocation(tree, token), tree, encoding);
+}
 
-    var i: usize = token_loc.start;
+/// Token location inside source
+pub const Loc = struct {
+    start: usize,
+    end: usize,
+};
+
+pub fn locationLength(loc: Loc, tree: Ast, encoding: Encoding) usize {
+    if (encoding == .utf8)
+        return loc.end - loc.start;
+
+    var i: usize = loc.start;
     var utf16_len: usize = 0;
-    while (i < token_loc.end) {
+    while (i < loc.end) {
         const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable;
         const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable;
         if (codepoint < 0x10000) {
@@ -161,12 +170,6 @@ pub fn tokenLength(tree: Ast, token: Ast.TokenIndex, encoding: Encoding) usize {
     return utf16_len;
 }
 
-/// Token location inside source
-pub const Loc = struct {
-    start: usize,
-    end: usize,
-};
-
 pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
     const start = tree.tokens.items(.start)[token_index];
     const tag = tree.tokens.items(.tag)[token_index];
@@ -183,6 +186,50 @@ pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
     return .{ .start = token.loc.start, .end = token.loc.end };
 }
 
+/// returns the range of the given token at `token_index`
+pub fn tokenToRange(tree: Ast, token_index: Ast.TokenIndex, encoding: Encoding) !types.Range {
+    const loc = try tokenRelativeLocation(tree, 0, tree.tokens.items(.start)[token_index], encoding);
+    const length = tokenLength(tree, token_index, encoding);
+
+    return types.Range{
+        .start = .{
+            .line = @intCast(i64, loc.line),
+            .character = @intCast(i64, loc.column),
+        },
+        .end = .{
+            .line = @intCast(i64, loc.line),
+            .character = @intCast(i64, loc.column + length),
+        },
+    };
+}
+
+/// returns the range of a token pointed to by `position`
+pub fn tokenPositionToRange(tree: Ast, position: types.Position, encoding: Encoding) !types.Range {
+    const doc = .{
+        .uri = undefined,
+        .text = tree.source,
+        .mem = undefined,
+    };
+    const document_position = try documentPosition(doc, position, encoding);
+
+    var tokenizer: std.zig.Tokenizer = .{
+        .buffer = tree.source,
+        .index = document_position.absolute_index,
+        .pending_invalid_token = null,
+    };
+    const token = tokenizer.next();
+    const loc: Loc = .{ .start = token.loc.start, .end = token.loc.end };
+    const length = locationLength(loc, tree, encoding);
+
+    return types.Range{
+        .start = position,
+        .end = .{
+            .line = position.line,
+            .character = position.character + @intCast(i64, length),
+        },
+    };
+}
+
 pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range {
     var line_idx: i64 = 0;
     var curr_line: []const u8 = doc.text;
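
Annotation on locationLength above (not part of the commit): the non-UTF-8 branch exists because LSP clients may count `character` offsets in UTF-16 code units, so a byte count is only correct for UTF-8. A self-contained sketch of the same counting loop; the body of `if (codepoint < 0x10000)` is cut off in the hunk above, and the 1-unit/2-unit split below is standard UTF-16, my assumption about the truncated part:

const std = @import("std");

const Encoding = enum { utf8, utf16 };

// Same strategy as locationLength: byte length for UTF-8 clients,
// UTF-16 code-unit count otherwise (astral codepoints take 2 units).
fn sliceLength(source: []const u8, encoding: Encoding) usize {
    if (encoding == .utf8) return source.len;

    var i: usize = 0;
    var utf16_len: usize = 0;
    while (i < source.len) {
        const n = std.unicode.utf8ByteSequenceLength(source[i]) catch unreachable;
        const codepoint = std.unicode.utf8Decode(source[i .. i + n]) catch unreachable;
        utf16_len += if (codepoint < 0x10000) @as(usize, 1) else 2;
        i += n;
    }
    return utf16_len;
}

test "astral codepoints count twice in utf16" {
    try std.testing.expectEqual(@as(usize, 3), sliceLength("a🚀", .utf16));
}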

src/references.zig

@@ -8,19 +8,9 @@ const log = std.log.scoped(.references);
 const ast = @import("ast.zig");
 
 fn tokenReference(handle: *DocumentStore.Handle, tok: Ast.TokenIndex, encoding: offsets.Encoding, context: anytype, comptime handler: anytype) !void {
-    const loc = offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[tok], encoding) catch return;
     try handler(context, types.Location{
         .uri = handle.uri(),
-        .range = .{
-            .start = .{
-                .line = @intCast(i64, loc.line),
-                .character = @intCast(i64, loc.column),
-            },
-            .end = .{
-                .line = @intCast(i64, loc.line),
-                .character = @intCast(i64, loc.column + offsets.tokenLength(handle.tree, tok, encoding)),
-            },
-        },
+        .range = offsets.tokenToRange(handle.tree, tok,encoding) catch return,
     });
 }
 