Merge pull request #630 from Techatrix/master

Reconstruct diagnostic range end of ast-gen
Auguste Rame 2022-09-05 20:50:02 -04:00 committed by GitHub
commit aa81d83136
3 changed files with 69 additions and 48 deletions
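
The fix in one sentence: diagnostic ranges built from tree.tokenLocation collapsed to a zero-width point (.end was a copy of .start, as the deleted astLocationToRange below shows), and the new offsets.tokenToRange/locationLength helpers derive a real .end by adding the token's length counted in the client's negotiated offset encoding. A minimal, self-contained sketch of that length computation, using only the Zig standard library (the encodedLength name and the sample source are illustrative, not zls code):

const std = @import("std");

// Length of source[start..end] in the negotiated offset encoding:
// a byte count for UTF-8 clients, a code-unit count for UTF-16 clients.
fn encodedLength(source: []const u8, start: usize, end: usize, utf16: bool) usize {
    if (!utf16) return end - start;
    var i: usize = start;
    var utf16_len: usize = 0;
    while (i < end) {
        const n = std.unicode.utf8ByteSequenceLength(source[i]) catch unreachable;
        const codepoint = std.unicode.utf8Decode(source[i .. i + n]) catch unreachable;
        // Codepoints beyond the BMP take a surrogate pair: two UTF-16 units.
        utf16_len += if (codepoint < 0x10000) @as(usize, 1) else 2;
        i += n;
    }
    return utf16_len;
}

test "range end depends on the offset encoding" {
    const source = "const σ = 1;";
    // `σ` spans bytes 6..8: two UTF-8 bytes but a single UTF-16 code unit.
    try std.testing.expectEqual(@as(usize, 2), encodedLength(source, 6, 8, false));
    try std.testing.expectEqual(@as(usize, 1), encodedLength(source, 6, 8, true));
}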


@@ -197,20 +197,6 @@ fn showMessage(server: *Server, writer: anytype, message_type: types.MessageType
         });
     }
 
-// TODO: Is this correct or can we get a better end?
-fn astLocationToRange(loc: Ast.Location) types.Range {
-    return .{
-        .start = .{
-            .line = @intCast(i64, loc.line),
-            .character = @intCast(i64, loc.column),
-        },
-        .end = .{
-            .line = @intCast(i64, loc.line),
-            .character = @intCast(i64, loc.column),
-        },
-    };
-}
-
 fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle) !void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
@@ -221,14 +207,12 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
     var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){};
 
     for (tree.errors) |err| {
-        const loc = tree.tokenLocation(0, err.token);
-
         var mem_buffer: [256]u8 = undefined;
         var fbs = std.io.fixedBufferStream(&mem_buffer);
         try tree.renderError(err, fbs.writer());
 
         try diagnostics.append(allocator, .{
-            .range = astLocationToRange(loc),
+            .range = offsets.tokenToRange(tree, err.token, server.offset_encoding) catch continue,
             .severity = .Error,
             .code = @tagName(err.tag),
             .source = "zls",
@@ -267,16 +251,18 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
                 if (first.len <= 1) break :lin;
             } else break;
 
-            const pos = types.Position{
+            const position = types.Position{
                 .line = (try std.fmt.parseInt(i64, pos_and_diag_iterator.next().?, 10)) - 1,
                 .character = (try std.fmt.parseInt(i64, pos_and_diag_iterator.next().?, 10)) - 1,
             };
 
+            const range = try offsets.tokenPositionToRange(tree, position, server.offset_encoding);
+
             const msg = pos_and_diag_iterator.rest()[1..];
 
             if (std.mem.startsWith(u8, msg, "error: ")) {
                 try diagnostics.append(allocator, .{
-                    .range = .{ .start = pos, .end = pos },
+                    .range = range,
                     .severity = .Error,
                     .code = "ast_check",
                     .source = "zls",
@@ -292,7 +278,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
 
                         const location = types.Location{
                             .uri = handle.uri(),
-                            .range = .{ .start = pos, .end = pos },
+                            .range = range,
                         };
 
                         fresh[fresh.len - 1] = .{
@@ -303,7 +289,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
                 latestDiag.relatedInformation = fresh;
             } else {
                 try diagnostics.append(allocator, .{
-                    .range = .{ .start = pos, .end = pos },
+                    .range = range,
                     .severity = .Error,
                     .code = "ast_check",
                     .source = "zls",
@@ -336,7 +322,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
 
             if (std.mem.startsWith(u8, import_str, "\"./")) {
                 try diagnostics.append(allocator, .{
-                    .range = astLocationToRange(tree.tokenLocation(0, import_str_token)),
+                    .range = offsets.tokenToRange(tree, import_str_token, server.offset_encoding) catch continue,
                     .severity = .Hint,
                     .code = "dot_slash_import",
                     .source = "zls",
@@ -362,14 +348,12 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
                 if (func.extern_export_inline_token != null) break :blk;
 
                 if (func.name_token) |name_token| {
-                    const loc = tree.tokenLocation(0, name_token);
-
                     const is_type_function = analysis.isTypeFunction(tree, func);
                     const func_name = tree.tokenSlice(name_token);
 
                     if (!is_type_function and !analysis.isCamelCase(func_name)) {
                         try diagnostics.append(allocator, .{
-                            .range = astLocationToRange(loc),
+                            .range = offsets.tokenToRange(tree, name_token, server.offset_encoding) catch continue,
                             .severity = .Hint,
                             .code = "bad_style",
                             .source = "zls",
@@ -377,7 +361,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
                         });
                     } else if (is_type_function and !analysis.isPascalCase(func_name)) {
                         try diagnostics.append(allocator, .{
-                            .range = astLocationToRange(loc),
+                            .range = offsets.tokenToRange(tree, name_token, server.offset_encoding) catch continue,
                             .severity = .Hint,
                             .code = "bad_style",
                             .source = "zls",


@@ -142,13 +142,22 @@ pub fn tokenRelativeLocation(tree: Ast, start_index: usize, token_start: usize,
 /// Asserts the token is comprised of valid utf8
 pub fn tokenLength(tree: Ast, token: Ast.TokenIndex, encoding: Encoding) usize {
-    const token_loc = tokenLocation(tree, token);
+    return locationLength(tokenLocation(tree, token), tree, encoding);
+}
+
+/// Token location inside source
+pub const Loc = struct {
+    start: usize,
+    end: usize,
+};
+
+pub fn locationLength(loc: Loc, tree: Ast, encoding: Encoding) usize {
     if (encoding == .utf8)
-        return token_loc.end - token_loc.start;
+        return loc.end - loc.start;
 
-    var i: usize = token_loc.start;
+    var i: usize = loc.start;
     var utf16_len: usize = 0;
-    while (i < token_loc.end) {
+    while (i < loc.end) {
         const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable;
         const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable;
         if (codepoint < 0x10000) {
@@ -161,12 +170,6 @@ pub fn tokenLength(tree: Ast, token: Ast.TokenIndex, encoding: Encoding) usize {
     return utf16_len;
 }
 
-/// Token location inside source
-pub const Loc = struct {
-    start: usize,
-    end: usize,
-};
-
 pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
     const start = tree.tokens.items(.start)[token_index];
     const tag = tree.tokens.items(.tag)[token_index];
@@ -183,6 +186,50 @@ pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
     return .{ .start = token.loc.start, .end = token.loc.end };
 }
 
+/// returns the range of the given token at `token_index`
+pub fn tokenToRange(tree: Ast, token_index: Ast.TokenIndex, encoding: Encoding) !types.Range {
+    const loc = try tokenRelativeLocation(tree, 0, tree.tokens.items(.start)[token_index], encoding);
+    const length = tokenLength(tree, token_index, encoding);
+
+    return types.Range{
+        .start = .{
+            .line = @intCast(i64, loc.line),
+            .character = @intCast(i64, loc.column),
+        },
+        .end = .{
+            .line = @intCast(i64, loc.line),
+            .character = @intCast(i64, loc.column + length),
+        },
+    };
+}
+
+/// returns the range of a token pointed to by `position`
+pub fn tokenPositionToRange(tree: Ast, position: types.Position, encoding: Encoding) !types.Range {
+    const doc = .{
+        .uri = undefined,
+        .text = tree.source,
+        .mem = undefined,
+    };
+    const document_position = try documentPosition(doc, position, encoding);
+
+    var tokenizer: std.zig.Tokenizer = .{
+        .buffer = tree.source,
+        .index = document_position.absolute_index,
+        .pending_invalid_token = null,
+    };
+    const token = tokenizer.next();
+    const loc: Loc = .{ .start = token.loc.start, .end = token.loc.end };
+    const length = locationLength(loc, tree, encoding);
+
+    return types.Range{
+        .start = position,
+        .end = .{
+            .line = position.line,
+            .character = position.character + @intCast(i64, length),
+        },
+    };
+}
+
 pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range {
     var line_idx: i64 = 0;
     var curr_line: []const u8 = doc.text;


@@ -8,19 +8,9 @@ const log = std.log.scoped(.references);
 const ast = @import("ast.zig");
 
 fn tokenReference(handle: *DocumentStore.Handle, tok: Ast.TokenIndex, encoding: offsets.Encoding, context: anytype, comptime handler: anytype) !void {
-    const loc = offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[tok], encoding) catch return;
     try handler(context, types.Location{
         .uri = handle.uri(),
-        .range = .{
-            .start = .{
-                .line = @intCast(i64, loc.line),
-                .character = @intCast(i64, loc.column),
-            },
-            .end = .{
-                .line = @intCast(i64, loc.line),
-                .character = @intCast(i64, loc.column + offsets.tokenLength(handle.tree, tok, encoding)),
-            },
-        },
+        .range = offsets.tokenToRange(handle.tree, tok, encoding) catch return,
     });
 }
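
A note on the tokenPositionToRange path above: instead of walking the Ast, it recovers a token's extent by re-tokenizing the source at the reported byte offset and widening the range by the token's encoded length. A standalone sketch of the re-tokenize step, built against the same 2022-era std.zig.Tokenizer fields the diff uses (the sample source and offset are illustrative):

const std = @import("std");

test "re-tokenize at a byte offset to recover a token's extent" {
    const source = "const foo = bar();";
    // Point the tokenizer at byte 12, the first byte of `bar`.
    var tokenizer = std.zig.Tokenizer{
        .buffer = source,
        .index = 12,
        .pending_invalid_token = null,
    };
    const token = tokenizer.next();
    try std.testing.expectEqual(std.zig.Token.Tag.identifier, token.tag);
    try std.testing.expectEqualStrings("bar", source[token.loc.start..token.loc.end]);
}

This presumably works because ast-check positions point at the first byte of the offending token; starting the tokenizer mid-token would recover only the token's tail.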