respect document encoding
commit 7730c7715b
parent 925cc3fee9
@@ -197,57 +197,6 @@ fn showMessage(server: *Server, writer: anytype, message_type: types.MessageType
     });
 }
 
-/// returns the range of the given token at `token_index`
-fn tokenToRange(tree: Ast, token_index: Ast.TokenIndex) types.Range {
-    const loc = tree.tokenLocation(0, token_index);
-    const length = tree.tokenSlice(token_index).len;
-
-    return .{
-        .start = .{
-            .line = @intCast(i64, loc.line),
-            .character = @intCast(i64, loc.column),
-        },
-        .end = .{
-            .line = @intCast(i64, loc.line),
-            .character = @intCast(i64, loc.column + length),
-        },
-    };
-}
-
-/// returns the source index in `text` at `position`
-fn positionToIndex(text: [:0]const u8, position: types.Position) usize {
-    var current_line: usize = 0;
-
-    for (text) |c, i| {
-        if (current_line == position.line) {
-            return @minimum(i + @intCast(usize, position.character), text.len);
-        }
-        if (c == '\n') {
-            current_line += 1;
-        }
-    }
-    return text.len;
-}
-
-/// returns the range of a token pointed to by `position`
-fn tokenPositionToRange(text: [:0]const u8, position: types.Position) types.Range {
-    var tokenizer: std.zig.Tokenizer = .{
-        .buffer = text,
-        .index = positionToIndex(text, position),
-        .pending_invalid_token = null,
-    };
-    const token = tokenizer.next();
-    const length = @intCast(i64, token.loc.end - token.loc.start);
-
-    return .{
-        .start = position,
-        .end = .{
-            .line = position.line,
-            .character = position.character + length,
-        },
-    };
-}
-
 fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle) !void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
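The helpers removed above measured positions in UTF-8 bytes. LSP clients, however, default to UTF-16 code units for line/character offsets, so byte-based ranges drift on any line containing multi-byte characters. A minimal standalone sketch of the discrepancy (not part of the commit; it reuses the decoding approach of the new locationLength shown further down):

    const std = @import("std");

    // Count UTF-16 code units the way the new locationLength does:
    // one unit per codepoint below 0x10000, two for a surrogate pair.
    fn utf16Length(text: []const u8) usize {
        var i: usize = 0;
        var len: usize = 0;
        while (i < text.len) {
            const n = std.unicode.utf8ByteSequenceLength(text[i]) catch unreachable;
            const codepoint = std.unicode.utf8Decode(text[i .. i + n]) catch unreachable;
            len += if (codepoint < 0x10000) @as(usize, 1) else 2;
            i += n;
        }
        return len;
    }

    test "byte columns diverge from UTF-16 columns" {
        const line = "héllo"; // 'é' is 2 UTF-8 bytes but 1 UTF-16 code unit
        try std.testing.expectEqual(@as(usize, 6), line.len);
        try std.testing.expectEqual(@as(usize, 5), utf16Length(line));
    }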
@@ -263,7 +212,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle
         try tree.renderError(err, fbs.writer());
 
         try diagnostics.append(allocator, .{
-            .range = tokenToRange(tree, err.token),
+            .range = offsets.tokenToRange(tree, err.token, server.offset_encoding) catch continue,
             .severity = .Error,
             .code = @tagName(err.tag),
             .source = "zls",
@@ -307,7 +256,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle
                 .character = (try std.fmt.parseInt(i64, pos_and_diag_iterator.next().?, 10)) - 1,
             };
 
-            const range = tokenPositionToRange(handle.document.text, position);
+            const range = try offsets.tokenPositionToRange(tree, position, server.offset_encoding);
 
             const msg = pos_and_diag_iterator.rest()[1..];
 
@@ -373,7 +322,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle
 
             if (std.mem.startsWith(u8, import_str, "\"./")) {
                 try diagnostics.append(allocator, .{
-                    .range = tokenToRange(tree, import_str_token),
+                    .range = offsets.tokenToRange(tree, import_str_token, server.offset_encoding) catch continue,
                     .severity = .Hint,
                     .code = "dot_slash_import",
                     .source = "zls",
@@ -404,7 +353,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle
             const func_name = tree.tokenSlice(name_token);
             if (!is_type_function and !analysis.isCamelCase(func_name)) {
                 try diagnostics.append(allocator, .{
-                    .range = tokenToRange(tree, name_token),
+                    .range = offsets.tokenToRange(tree, name_token, server.offset_encoding) catch continue,
                     .severity = .Hint,
                     .code = "bad_style",
                     .source = "zls",
@@ -412,7 +361,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle
                 });
             } else if (is_type_function and !analysis.isPascalCase(func_name)) {
                 try diagnostics.append(allocator, .{
-                    .range = tokenToRange(tree, name_token),
+                    .range = offsets.tokenToRange(tree, name_token, server.offset_encoding) catch continue,
                     .severity = .Hint,
                     .code = "bad_style",
                     .source = "zls",
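The four publishDiagnostics hunks above share one shape: range construction becomes fallible and encoding-aware, and a diagnostic whose range cannot be computed is skipped rather than failing the whole publish. A sketch of that pattern; the enclosing loop over tree.errors is assumed from the surrounding context, and server.offset_encoding is presumably negotiated with the client at initialize time:

    for (tree.errors) |err| {
        try diagnostics.append(allocator, .{
            // `catch continue` drops just this diagnostic on failure
            // instead of aborting the whole publish.
            .range = offsets.tokenToRange(tree, err.token, server.offset_encoding) catch continue,
            .severity = .Error,
            .code = @tagName(err.tag),
            .source = "zls",
            // remaining fields (e.g. the rendered error message) elided here
        });
    }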
@@ -142,13 +142,22 @@ pub fn tokenRelativeLocation(tree: Ast, start_index: usize, token_start: usize,
 
 /// Asserts the token is comprised of valid utf8
 pub fn tokenLength(tree: Ast, token: Ast.TokenIndex, encoding: Encoding) usize {
-    const token_loc = tokenLocation(tree, token);
-    if (encoding == .utf8)
-        return token_loc.end - token_loc.start;
+    return locationLength(tokenLocation(tree, token), tree, encoding);
+}
 
-    var i: usize = token_loc.start;
+/// Token location inside source
+pub const Loc = struct {
+    start: usize,
+    end: usize,
+};
+
+pub fn locationLength(loc: Loc, tree: Ast, encoding: Encoding) usize {
+    if (encoding == .utf8)
+        return loc.end - loc.start;
+
+    var i: usize = loc.start;
     var utf16_len: usize = 0;
-    while (i < token_loc.end) {
+    while (i < loc.end) {
         const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable;
         const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable;
         if (codepoint < 0x10000) {
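locationLength charges one UTF-16 code unit per codepoint below 0x10000 and two for anything above it (a surrogate pair): 'é' (U+00E9) is two UTF-8 bytes but one UTF-16 unit, while an emoji such as U+1F600 is four UTF-8 bytes and two UTF-16 units. A hedged usage sketch; only .utf8 appears in this diff, and .utf16 is assumed to be the Encoding enum's other variant:

    const loc = offsets.tokenLocation(tree, token_index);
    const byte_len = offsets.locationLength(loc, tree, .utf8); // just end - start
    const utf16_len = offsets.locationLength(loc, tree, .utf16); // decoding walk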
@@ -161,12 +170,6 @@ pub fn tokenLength(tree: Ast, token: Ast.TokenIndex, encoding: Encoding) usize {
     return utf16_len;
 }
 
-/// Token location inside source
-pub const Loc = struct {
-    start: usize,
-    end: usize,
-};
-
 pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
     const start = tree.tokens.items(.start)[token_index];
     const tag = tree.tokens.items(.tag)[token_index];
@@ -183,6 +186,50 @@ pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
     return .{ .start = token.loc.start, .end = token.loc.end };
 }
 
+/// returns the range of the given token at `token_index`
+pub fn tokenToRange(tree: Ast, token_index: Ast.TokenIndex, encoding: Encoding) !types.Range {
+    const loc = try tokenRelativeLocation(tree, 0, tree.tokens.items(.start)[token_index], encoding);
+    const length = tokenLength(tree, token_index, encoding);
+
+    return types.Range{
+        .start = .{
+            .line = @intCast(i64, loc.line),
+            .character = @intCast(i64, loc.column),
+        },
+        .end = .{
+            .line = @intCast(i64, loc.line),
+            .character = @intCast(i64, loc.column + length),
+        },
+    };
+}
+
+/// returns the range of a token pointed to by `position`
+pub fn tokenPositionToRange(tree: Ast, position: types.Position, encoding: Encoding) !types.Range {
+    const doc = .{
+        .uri = undefined,
+        .text = tree.source,
+        .mem = undefined,
+    };
+    const document_position = try documentPosition(doc, position, encoding);
+
+    var tokenizer: std.zig.Tokenizer = .{
+        .buffer = tree.source,
+        .index = document_position.absolute_index,
+        .pending_invalid_token = null,
+    };
+    const token = tokenizer.next();
+    const loc: Loc = .{ .start = token.loc.start, .end = token.loc.end };
+    const length = locationLength(loc, tree, encoding);
+
+    return types.Range{
+        .start = position,
+        .end = .{
+            .line = position.line,
+            .character = position.character + @intCast(i64, length),
+        },
+    };
+}
+
 pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range {
     var line_idx: i64 = 0;
     var curr_line: []const u8 = doc.text;
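tokenPositionToRange builds a throwaway document literal with only .text populated, leaving .uri and .mem undefined, which is safe only as long as documentPosition never reads those fields. The returned range starts at the requested position and extends by the token's length measured in the requested encoding. A hedged usage sketch (the Position field values are illustrative):

    // Range of the token under the cursor, measured in the client's encoding.
    const pos = types.Position{ .line = 2, .character = 4 };
    const range = try offsets.tokenPositionToRange(tree, pos, server.offset_encoding);
    std.debug.assert(range.start.line == range.end.line); // token stays on one line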
@@ -8,19 +8,9 @@ const log = std.log.scoped(.references);
 const ast = @import("ast.zig");
 
 fn tokenReference(handle: *DocumentStore.Handle, tok: Ast.TokenIndex, encoding: offsets.Encoding, context: anytype, comptime handler: anytype) !void {
-    const loc = offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[tok], encoding) catch return;
     try handler(context, types.Location{
         .uri = handle.uri(),
-        .range = .{
-            .start = .{
-                .line = @intCast(i64, loc.line),
-                .character = @intCast(i64, loc.column),
-            },
-            .end = .{
-                .line = @intCast(i64, loc.line),
-                .character = @intCast(i64, loc.column + offsets.tokenLength(handle.tree, tok, encoding)),
-            },
-        },
+        .range = offsets.tokenToRange(handle.tree, tok, encoding) catch return,
     });
 }
 
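Behavior in tokenReference is unchanged by this hunk: the hand-built range was already encoding-aware via tokenRelativeLocation and tokenLength. Routing it through the shared offsets.tokenToRange removes the duplicated start/end construction, so references and diagnostics now compute ranges through a single code path.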