Completely overhaul offsets.zig (#643)

* completely overhaul offsets.zig

parent 5aff17afb0
commit e28549fa7d
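
This commit replaces the old line/column bookkeeping in offsets.zig (offsets.DocumentPosition, tokenLocation, tokenRelativeLocation) with plain byte offsets: positions are converted once at the request boundary via offsets.positionToIndex, passed around as a source_index: usize or an offsets.Loc { start, end } pair, and converted back to LSP ranges only when building a response. As a rough sketch of what positionToIndex does; all signatures here are inferred from the call sites in this diff, not copied from offsets.zig:

    const std = @import("std");

    pub const Encoding = enum { utf8, utf16, utf32 };
    pub const Position = struct { line: u32, character: u32 };

    // Map an LSP Position (line + character counted in code units of the
    // negotiated encoding) to a byte index into `text`.
    pub fn positionToIndex(text: []const u8, position: Position, encoding: Encoding) usize {
        // locate the start of the requested line
        var line: u32 = 0;
        var line_start: usize = 0;
        while (line < position.line) : (line += 1) {
            const nl = std.mem.indexOfScalarPos(u8, text, line_start, '\n') orelse return text.len;
            line_start = nl + 1;
        }
        // consume `character` code units, stepping over whole codepoints
        var i: usize = line_start;
        var units: u32 = 0;
        while (i < text.len and text[i] != '\n' and units < position.character) {
            const len = std.unicode.utf8ByteSequenceLength(text[i]) catch 1;
            units += switch (encoding) {
                .utf8 => len,
                .utf16 => if (len >= 4) @as(u32, 2) else 1, // astral codepoints take two UTF-16 units
                .utf32 => 1,
            };
            i += len;
        }
        return i;
    }

With an index in hand, the old DocumentPosition.absolute_index field becomes redundant, which is why every handler below now takes a bare source_index.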
@@ -847,18 +847,18 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
 // TODO: add tests and validate the JSON
 const start_obj = range.Object.get("start").?.Object;
 const start_pos = types.Position{
-.line = start_obj.get("line").?.Integer,
+.line = @intCast(u32, start_obj.get("line").?.Integer),
-.character = start_obj.get("character").?.Integer,
+.character = @intCast(u32, start_obj.get("character").?.Integer),
 };
 const end_obj = range.Object.get("end").?.Object;
 const end_pos = types.Position{
-.line = end_obj.get("line").?.Integer,
+.line = @intCast(u32, end_obj.get("line").?.Integer),
-.character = end_obj.get("character").?.Integer,
+.character = @intCast(u32, end_obj.get("character").?.Integer),
 };

 const change_text = change.Object.get("text").?.String;
-const start_index = (try offsets.documentPosition(document.*, start_pos, offset_encoding)).absolute_index;
+const start_index = offsets.positionToIndex(document.text, start_pos, offset_encoding);
-const end_index = (try offsets.documentPosition(document.*, end_pos, offset_encoding)).absolute_index;
+const end_index = offsets.positionToIndex(document.text, end_pos, offset_encoding);

 const old_len = document.text.len;
 const new_len = old_len - (end_index - start_index) + change_text.len;

src/Server.zig | 222

@@ -157,7 +157,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
 try tree.renderError(err, fbs.writer());

 try diagnostics.append(allocator, .{
-.range = offsets.tokenToRange(tree, err.token, server.offset_encoding) catch continue,
+.range = offsets.tokenToRange(tree, err.token, server.offset_encoding),
 .severity = .Error,
 .code = @tagName(err.tag),
 .source = "zls",
@@ -196,12 +196,14 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
 if (first.len <= 1) break :lin;
 } else break;

-const position = types.Position{
+const utf8_position = types.Position{
-.line = (try std.fmt.parseInt(i64, pos_and_diag_iterator.next().?, 10)) - 1,
+.line = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
-.character = (try std.fmt.parseInt(i64, pos_and_diag_iterator.next().?, 10)) - 1,
+.character = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
 };

-const range = try offsets.tokenPositionToRange(tree, position, server.offset_encoding);
+// zig uses utf-8 encoding for character offsets
+const position = offsets.convertPositionEncoding(handle.document.text, utf8_position, .utf8, server.offset_encoding);
+const range = offsets.tokenPositionToRange(handle.document.text, position, server.offset_encoding);

 const msg = pos_and_diag_iterator.rest()[1..];

@@ -267,7 +269,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha

 if (std.mem.startsWith(u8, import_str, "\"./")) {
 try diagnostics.append(allocator, .{
-.range = offsets.tokenToRange(tree, import_str_token, server.offset_encoding) catch continue,
+.range = offsets.tokenToRange(tree, import_str_token, server.offset_encoding),
 .severity = .Hint,
 .code = "dot_slash_import",
 .source = "zls",
@@ -298,7 +300,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
 const func_name = tree.tokenSlice(name_token);
 if (!is_type_function and !analysis.isCamelCase(func_name)) {
 try diagnostics.append(allocator, .{
-.range = offsets.tokenToRange(tree, name_token, server.offset_encoding) catch continue,
+.range = offsets.tokenToRange(tree, name_token, server.offset_encoding),
 .severity = .Hint,
 .code = "bad_style",
 .source = "zls",
@@ -306,7 +308,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
 });
 } else if (is_type_function and !analysis.isPascalCase(func_name)) {
 try diagnostics.append(allocator, .{
-.range = offsets.tokenToRange(tree, name_token, server.offset_encoding) catch continue,
+.range = offsets.tokenToRange(tree, name_token, server.offset_encoding),
 .severity = .Hint,
 .code = "bad_style",
 .source = "zls",
@@ -595,7 +597,7 @@ fn nodeToCompletion(
 .label = string,
 .kind = .Field,
 .documentation = doc,
-.detail = tree.getNodeSource(node),
+.detail = offsets.nodeToSlice(tree, node),
 .insertText = string,
 .insertTextFormat = .PlainText,
 });
@@ -638,20 +640,19 @@ fn gotoDefinitionSymbol(

 var handle = decl_handle.handle;

-const location = switch (decl_handle.decl.*) {
+const name_token = switch (decl_handle.decl.*) {
 .ast_node => |node| block: {
 if (resolve_alias) {
 if (try analysis.resolveVarDeclAlias(&server.document_store, &server.arena, .{ .node = node, .handle = handle })) |result| {
 handle = result.handle;
-break :block result.location(server.offset_encoding) catch return;
+break :block result.nameToken();
 }
 }

-const name_token = analysis.getDeclNameToken(handle.tree, node) orelse
+break :block analysis.getDeclNameToken(handle.tree, node) orelse return try respondGeneric(writer, id, null_result_response);
-return try respondGeneric(writer, id, null_result_response);
-break :block offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[name_token], server.offset_encoding) catch return;
 },
-else => decl_handle.location(server.offset_encoding) catch return,
+else => decl_handle.nameToken(),
 };

 try send(writer, server.arena.allocator(), types.Response{
@@ -659,16 +660,7 @@ fn gotoDefinitionSymbol(
 .result = .{
 .Location = .{
 .uri = handle.document.uri,
-.range = .{
+.range = offsets.tokenToRange(handle.tree, name_token, server.offset_encoding),
-.start = .{
-.line = @intCast(i64, location.line),
-.character = @intCast(i64, location.column),
-},
-.end = .{
-.line = @intCast(i64, location.line),
-.character = @intCast(i64, location.column),
-},
-},
 },
 },
 });
@@ -720,8 +712,8 @@ fn hoverSymbol(
 tree.firstToken(param.type_expr); // extern fn
 const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);

-const start = offsets.tokenLocation(tree, first_token).start;
+const start = offsets.tokenToIndex(tree, first_token);
-const end = offsets.tokenLocation(tree, last_token).end;
+const end = offsets.tokenToLoc(tree, last_token).end;
 break :def tree.source[start..end];
 },
 .pointer_payload => |payload| tree.tokenSlice(payload.name),
@@ -740,7 +732,7 @@ fn hoverSymbol(
 .slice,
 .error_union,
 .primitive,
-=> |p| if (p >= tree.nodes.len) "unknown" else tree.getNodeSource(p),
+=> |p| if (p >= tree.nodes.len) "unknown" else offsets.nodeToSlice(tree, p),
 .other => |p| if (p >= tree.nodes.len) "unknown" else switch (tree.nodes.items(.tag)[p]) {
 .container_decl,
 .container_decl_arg,
@@ -767,7 +759,7 @@ fn hoverSymbol(
 .ptr_type_aligned,
 .ptr_type_bit_range,
 .ptr_type_sentinel,
-=> tree.getNodeSource(p),
+=> offsets.nodeToSlice(tree, p),
 else => "unknown", // TODO: Implement more "other" type expressions; better safe than sorry
 },
 else => "unknown",
@@ -899,21 +891,20 @@ fn hoverDefinitionGlobal(server: *Server, writer: anytype, id: types.RequestId,
 fn getSymbolFieldAccess(
 server: *Server,
 handle: *DocumentStore.Handle,
-position: offsets.DocumentPosition,
+source_index: usize,
-range: analysis.SourceRange,
+loc: offsets.Loc,
 ) !?analysis.DeclWithHandle {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

-const name = identifierFromPosition(position.absolute_index, handle.*);
+const name = identifierFromPosition(source_index, handle.*);
 if (name.len == 0) return null;

-const line_mem_start = @ptrToInt(position.line.ptr) - @ptrToInt(handle.document.mem.ptr);
+var held_range = handle.document.borrowNullTerminatedSlice(loc.start, loc.end);
-var held_range = handle.document.borrowNullTerminatedSlice(line_mem_start + range.start, line_mem_start + range.end);
 var tokenizer = std.zig.Tokenizer.init(held_range.data());

 errdefer held_range.release();
-if (try analysis.getFieldAccessType(&server.document_store, &server.arena, handle, position.absolute_index, &tokenizer)) |result| {
+if (try analysis.getFieldAccessType(&server.document_store, &server.arena, handle, source_index, &tokenizer)) |result| {
 held_range.release();
 const container_handle = result.unwrapped orelse result.original;
 const container_handle_node = switch (container_handle.type.data) {
@@ -936,14 +927,14 @@ fn gotoDefinitionFieldAccess(
 writer: anytype,
 id: types.RequestId,
 handle: *DocumentStore.Handle,
-position: offsets.DocumentPosition,
+source_index: usize,
-range: analysis.SourceRange,
+loc: offsets.Loc,
 resolve_alias: bool,
 ) !void {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

-const decl = (try server.getSymbolFieldAccess(handle, position, range)) orelse return try respondGeneric(writer, id, null_result_response);
+const decl = (try server.getSymbolFieldAccess(handle, source_index, loc)) orelse return try respondGeneric(writer, id, null_result_response);
 return try server.gotoDefinitionSymbol(writer, id, decl, resolve_alias);
 }

@@ -952,13 +943,13 @@ fn hoverDefinitionFieldAccess(
 writer: anytype,
 id: types.RequestId,
 handle: *DocumentStore.Handle,
-position: offsets.DocumentPosition,
+source_index: usize,
-range: analysis.SourceRange,
+loc: offsets.Loc,
 ) !void {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

-const decl = (try server.getSymbolFieldAccess(handle, position, range)) orelse return try respondGeneric(writer, id, null_result_response);
+const decl = (try server.getSymbolFieldAccess(handle, source_index, loc)) orelse return try respondGeneric(writer, id, null_result_response);
 return try server.hoverSymbol(writer, id, decl);
 }

@@ -1022,14 +1013,14 @@ fn renameDefinitionFieldAccess(
 writer: anytype,
 id: types.RequestId,
 handle: *DocumentStore.Handle,
-position: offsets.DocumentPosition,
+source_index: usize,
-range: analysis.SourceRange,
+loc: offsets.Loc,
 new_name: []const u8,
 ) !void {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

-const decl = (try server.getSymbolFieldAccess(handle, position, range)) orelse return try respondGeneric(writer, id, null_result_response);
+const decl = (try server.getSymbolFieldAccess(handle, source_index, loc)) orelse return try respondGeneric(writer, id, null_result_response);

 var workspace_edit = types.WorkspaceEdit{ .changes = .{} };
 try rename.renameSymbol(&server.arena, &server.document_store, decl, new_name, &workspace_edit.changes, server.offset_encoding);
@@ -1114,8 +1105,8 @@ fn referencesDefinitionFieldAccess(
 writer: anytype,
 id: types.RequestId,
 handle: *DocumentStore.Handle,
-position: offsets.DocumentPosition,
+source_index: usize,
-range: analysis.SourceRange,
+loc: offsets.Loc,
 include_decl: bool,
 comptime highlight: bool,
 ) !void {
@@ -1124,33 +1115,33 @@ fn referencesDefinitionFieldAccess(

 var allocator = server.arena.allocator();

-const decl = (try server.getSymbolFieldAccess(handle, position, range)) orelse return try respondGeneric(writer, id, null_result_response);
+const decl = (try server.getSymbolFieldAccess(handle, source_index, loc)) orelse return try respondGeneric(writer, id, null_result_response);
-var locs = std.ArrayList(types.Location).init(allocator);
+var locations = std.ArrayList(types.Location).init(allocator);
 try references.symbolReferences(
 &server.arena,
 &server.document_store,
 decl,
 server.offset_encoding,
 include_decl,
-&locs,
+&locations,
 std.ArrayList(types.Location).append,
 server.config.skip_std_references,
 !highlight,
 );
 const result: types.ResponseParams = if (highlight) result: {
 var highlights = std.ArrayListUnmanaged(types.DocumentHighlight){};
-try highlights.ensureTotalCapacity(allocator, locs.items.len);
+try highlights.ensureTotalCapacity(allocator, locations.items.len);
 const uri = handle.uri();
-for (locs.items) |loc| {
+for (locations.items) |location| {
-if (std.mem.eql(u8, loc.uri, uri)) {
+if (std.mem.eql(u8, location.uri, uri)) {
 highlights.appendAssumeCapacity(.{
-.range = loc.range,
+.range = location.range,
 .kind = .Text,
 });
 }
 }
 break :result .{ .DocumentHighlight = highlights.items };
-} else .{ .Locations = locs.items };
+} else .{ .Locations = locations.items };
 try send(writer, allocator, types.Response{
 .id = id,
 .result = result,
@@ -1250,7 +1241,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
 .label = tree.tokenSlice(param.name_token.?),
 .kind = .Constant,
 .documentation = doc,
-.detail = tree.source[offsets.tokenLocation(tree, first_token).start..offsets.tokenLocation(tree, last_token).end],
+.detail = tree.source[offsets.tokenToIndex(tree, first_token)..offsets.tokenToLoc(tree, last_token).end],
 .insertText = tree.tokenSlice(param.name_token.?),
 .insertTextFormat = .PlainText,
 });
@@ -1402,19 +1393,17 @@ fn completeGlobal(server: *Server, writer: anytype, id: types.RequestId, pos_ind
 });
 }

-fn completeFieldAccess(server: *Server, writer: anytype, id: types.RequestId, handle: *DocumentStore.Handle, position: offsets.DocumentPosition, range: analysis.SourceRange) !void {
+fn completeFieldAccess(server: *Server, writer: anytype, id: types.RequestId, handle: *DocumentStore.Handle, source_index: usize, loc: offsets.Loc) !void {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

 var completions = std.ArrayListUnmanaged(types.CompletionItem){};

-const line_mem_start = @ptrToInt(position.line.ptr) - @ptrToInt(handle.document.mem.ptr);
+var held_range = handle.document.borrowNullTerminatedSlice(loc.start, loc.end);
-var held_range = handle.document.borrowNullTerminatedSlice(line_mem_start + range.start, line_mem_start + range.end);
+defer held_range.release();
-errdefer held_range.release();
 var tokenizer = std.zig.Tokenizer.init(held_range.data());

-if (try analysis.getFieldAccessType(&server.document_store, &server.arena, handle, position.absolute_index, &tokenizer)) |result| {
+if (try analysis.getFieldAccessType(&server.document_store, &server.arena, handle, source_index, &tokenizer)) |result| {
-held_range.release();
 try server.typeToCompletion(&completions, result, handle);
 sortCompletionItems(completions.items, server.arena.allocator());
 truncateCompletions(completions.items, server.config.max_detail_length);
@@ -1688,9 +1677,26 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

-for (req.params.capabilities.offsetEncoding.value) |encoding| {
+if(req.params.capabilities.general) |general| {
-if (std.mem.eql(u8, encoding, "utf-8")) {
+var supports_utf8 = false;
+var supports_utf16 = false;
+var supports_utf32 = false;
+for(general.positionEncodings.value) |encoding| {
+if (std.mem.eql(u8, encoding, "utf-8")) {
+supports_utf8 = true;
+} else if(std.mem.eql(u8, encoding, "utf-16")) {
+supports_utf16 = true;
+} else if(std.mem.eql(u8, encoding, "utf-32")) {
+supports_utf32 = true;
+}
+}
+
+if(supports_utf8) {
 server.offset_encoding = .utf8;
+} else if(supports_utf32) {
+server.offset_encoding = .utf32;
+} else {
+server.offset_encoding = .utf16;
 }
 }

@@ -1721,10 +1727,7 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 .id = id,
 .result = .{
 .InitializeResult = .{
-.offsetEncoding = if (server.offset_encoding == .utf8)
+.offsetEncoding = server.offset_encoding,
-@as([]const u8, "utf-8")
-else
-"utf-16",
 .serverInfo = .{
 .name = "zls",
 .version = "0.1.0",
@@ -1951,22 +1954,21 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 if (req.params.position.character == 0)
 return try respondGeneric(writer, id, no_completions_response);

-const doc_position = try offsets.documentPosition(handle.document, req.params.position, server.offset_encoding);
+const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
-const pos_context = try analysis.documentPositionContext(&server.arena, handle.document, doc_position);
+const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);

 switch (pos_context) {
 .builtin => try server.completeBuiltin(writer, id),
-.var_access, .empty => try server.completeGlobal(writer, id, doc_position.absolute_index, handle),
+.var_access, .empty => try server.completeGlobal(writer, id, source_index, handle),
-.field_access => |range| try server.completeFieldAccess(writer, id, handle, doc_position, range),
+.field_access => |loc| try server.completeFieldAccess(writer, id, handle, source_index, loc),
 .global_error_set => try server.completeError(writer, id, handle),
 .enum_literal => try server.completeDot(writer, id, handle),
-.label => try server.completeLabel(writer, id, doc_position.absolute_index, handle),
+.label => try server.completeLabel(writer, id, source_index, handle),
 .import_string_literal, .embedfile_string_literal => |loc| {
 if (!server.config.enable_import_embedfile_argument_completions)
 return try respondGeneric(writer, id, no_completions_response);

-const line_mem_start = @ptrToInt(doc_position.line.ptr) - @ptrToInt(handle.document.mem.ptr);
+const completing = offsets.locToSlice(handle.tree.source, loc);
-const completing = handle.tree.source[line_mem_start + loc.start + 1 .. line_mem_start + loc.end];

 var subpath_present = false;
 var fsl_completions = std.ArrayListUnmanaged(types.CompletionItem){};
@@ -2048,12 +2050,12 @@ fn signatureHelpHandler(server: *Server, writer: anytype, id: types.RequestId, r
 if (req.params.position.character == 0)
 return try respondGeneric(writer, id, no_signatures_response);

-const doc_position = try offsets.documentPosition(handle.document, req.params.position, server.offset_encoding);
+const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
 if (try getSignatureInfo(
 &server.document_store,
 &server.arena,
 handle,
-doc_position.absolute_index,
+source_index,
 data,
 )) |sig_info| {
 return try send(writer, server.arena.allocator(), types.Response{
@@ -2080,14 +2082,14 @@ fn gotoHandler(server: *Server, writer: anytype, id: types.RequestId, req: reque
 };

 if (req.params.position.character >= 0) {
-const doc_position = try offsets.documentPosition(handle.document, req.params.position, server.offset_encoding);
+const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
-const pos_context = try analysis.documentPositionContext(&server.arena, handle.document, doc_position);
+const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);

 switch (pos_context) {
-.var_access => try server.gotoDefinitionGlobal(writer, id, doc_position.absolute_index, handle, resolve_alias),
+.var_access => try server.gotoDefinitionGlobal(writer, id, source_index, handle, resolve_alias),
-.field_access => |range| try server.gotoDefinitionFieldAccess(writer, id, handle, doc_position, range, resolve_alias),
+.field_access => |loc| try server.gotoDefinitionFieldAccess(writer, id, handle, source_index, loc, resolve_alias),
-.import_string_literal => try server.gotoDefinitionString(writer, id, doc_position.absolute_index, handle),
+.import_string_literal => try server.gotoDefinitionString(writer, id, source_index, handle),
-.label => try server.gotoDefinitionLabel(writer, id, doc_position.absolute_index, handle),
+.label => try server.gotoDefinitionLabel(writer, id, source_index, handle),
 else => try respondGeneric(writer, id, null_result_response),
 }
 } else {
@@ -2119,13 +2121,13 @@ fn hoverHandler(server: *Server, writer: anytype, id: types.RequestId, req: requ
 };

 if (req.params.position.character >= 0) {
-const doc_position = try offsets.documentPosition(handle.document, req.params.position, server.offset_encoding);
+const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
-const pos_context = try analysis.documentPositionContext(&server.arena, handle.document, doc_position);
+const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);
 switch (pos_context) {
-.builtin => try server.hoverDefinitionBuiltin(writer, id, doc_position.absolute_index, handle),
+.builtin => try server.hoverDefinitionBuiltin(writer, id, source_index, handle),
-.var_access => try server.hoverDefinitionGlobal(writer, id, doc_position.absolute_index, handle),
+.var_access => try server.hoverDefinitionGlobal(writer, id, source_index, handle),
-.field_access => |range| try server.hoverDefinitionFieldAccess(writer, id, handle, doc_position, range),
+.field_access => |loc| try server.hoverDefinitionFieldAccess(writer, id, handle, source_index, loc),
-.label => try server.hoverDefinitionLabel(writer, id, doc_position.absolute_index, handle),
+.label => try server.hoverDefinitionLabel(writer, id, source_index, handle),
 else => try respondGeneric(writer, id, null_result_response),
 }
 } else {
@@ -2174,6 +2176,7 @@ fn formattingHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 if (std.mem.eql(u8, handle.document.text, stdout_bytes)) return try respondGeneric(writer, id, null_result_response);

 var edits = diff.edits(server.allocator, handle.document.text, stdout_bytes) catch {
+const range = offsets.locToRange(handle.document.text, .{ .start = 0, .end = handle.document.text.len }, server.offset_encoding);
 // If there was an error trying to diff the text, return the formatted response
 // as the new text for the entire range of the document
 return try send(writer, server.arena.allocator(), types.Response{
@@ -2181,7 +2184,7 @@ fn formattingHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 .result = .{
 .TextEdits = &[1]types.TextEdit{
 .{
-.range = try offsets.documentRange(handle.document, server.offset_encoding),
+.range = range,
 .newText = stdout_bytes,
 },
 },
@@ -2230,13 +2233,12 @@ fn renameHandler(server: *Server, writer: anytype, id: types.RequestId, req: req
 };

 if (req.params.position.character >= 0) {
-const doc_position = try offsets.documentPosition(handle.document, req.params.position, server.offset_encoding);
+const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
-const pos_context = try analysis.documentPositionContext(&server.arena, handle.document, doc_position);
+const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);

 switch (pos_context) {
-.var_access => try server.renameDefinitionGlobal(writer, id, handle, doc_position.absolute_index, req.params.newName),
+.var_access => try server.renameDefinitionGlobal(writer, id, handle, source_index, req.params.newName),
-.field_access => |range| try server.renameDefinitionFieldAccess(writer, id, handle, doc_position, range, req.params.newName),
+.field_access => |loc| try server.renameDefinitionFieldAccess(writer, id, handle, source_index, loc, req.params.newName),
-.label => try server.renameDefinitionLabel(writer, id, handle, doc_position.absolute_index, req.params.newName),
+.label => try server.renameDefinitionLabel(writer, id, handle, source_index, req.params.newName),
 else => try respondGeneric(writer, id, null_result_response),
 }
 } else {
@@ -2273,14 +2275,14 @@ fn referencesHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 };

 if (req.params.position.character >= 0) {
-const doc_position = try offsets.documentPosition(handle.document, req.params.position, server.offset_encoding);
+const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
-const pos_context = try analysis.documentPositionContext(&server.arena, handle.document, doc_position);
+const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);

 const include_decl = req.params.context.includeDeclaration;
 switch (pos_context) {
-.var_access => try server.referencesDefinitionGlobal(writer, id, handle, doc_position.absolute_index, include_decl, false),
+.var_access => try server.referencesDefinitionGlobal(writer, id, handle, source_index, include_decl, false),
-.field_access => |range| try server.referencesDefinitionFieldAccess(writer, id, handle, doc_position, range, include_decl, false),
+.field_access => |loc| try server.referencesDefinitionFieldAccess(writer, id, handle, source_index, loc, include_decl, false),
-.label => try server.referencesDefinitionLabel(writer, id, handle, doc_position.absolute_index, include_decl, false),
+.label => try server.referencesDefinitionLabel(writer, id, handle, source_index, include_decl, false),
 else => try respondGeneric(writer, id, null_result_response),
 }
 } else {
@@ -2298,13 +2300,13 @@ fn documentHighlightHandler(server: *Server, writer: anytype, id: types.RequestI
 };

 if (req.params.position.character >= 0) {
-const doc_position = try offsets.documentPosition(handle.document, req.params.position, server.offset_encoding);
+const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
-const pos_context = try analysis.documentPositionContext(&server.arena, handle.document, doc_position);
+const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);

 switch (pos_context) {
-.var_access => try server.referencesDefinitionGlobal(writer, id, handle, doc_position.absolute_index, true, true),
+.var_access => try server.referencesDefinitionGlobal(writer, id, handle, source_index, true, true),
-.field_access => |range| try server.referencesDefinitionFieldAccess(writer, id, handle, doc_position, range, true, true),
+.field_access => |loc| try server.referencesDefinitionFieldAccess(writer, id, handle, source_index, loc, true, true),
-.label => try server.referencesDefinitionLabel(writer, id, handle, doc_position.absolute_index, true, true),
+.label => try server.referencesDefinitionLabel(writer, id, handle, source_index, true, true),
 else => try respondGeneric(writer, id, null_result_response),
 }
 } else {
@@ -2336,7 +2338,15 @@ fn inlayHintHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 // because the function could be stored in a different document
 // we need the regenerate hints when the document itself or its imported documents change
 // with caching it would also make sense to generate all hints instead of only the visible ones
-const hints = try inlay_hints.writeRangeInlayHint(&server.arena, &server.config, &server.document_store, handle, req.params.range, hover_kind);
+const hints = try inlay_hints.writeRangeInlayHint(
+&server.arena,
+&server.config,
+&server.document_store,
+handle,
+req.params.range,
+hover_kind,
+server.offset_encoding,
+);
 defer {
 for (hints) |hint| {
 server.allocator.free(hint.tooltip.value);
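
Note on the src/Server.zig changes above: server.offset_encoding is threaded through every conversion because LSP character offsets count code units of the client's negotiated encoding, not bytes. A standalone sketch (not zls code) of how far the encodings diverge on non-ASCII text:

    const std = @import("std");

    pub fn main() !void {
        const line = "🠁x"; // U+1F801: 4 UTF-8 bytes, 2 UTF-16 units, 1 codepoint
        const byte_col = std.mem.indexOfScalar(u8, line, 'x').?;
        var utf16_buf: [8]u16 = undefined;
        const utf16_col = try std.unicode.utf8ToUtf16Le(&utf16_buf, line[0..byte_col]);
        // the same cursor sits at character 4 (utf-8), 2 (utf-16) or 1 (utf-32)
        std.debug.print("utf-8: {}, utf-16: {}, utf-32: 1\n", .{ byte_col, utf16_col });
    }

This is also why initializeHandler above prefers utf-8 (no conversion work at all), falls back to utf-32 (one unit per codepoint), and only then to the LSP default of utf-16.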

src/analysis.zig | 249

@@ -85,10 +85,10 @@ pub fn collectDocComments(allocator: std.mem.Allocator, tree: Ast, doc_comments:

 /// Gets a function's keyword, name, arguments and return value.
 pub fn getFunctionSignature(tree: Ast, func: Ast.full.FnProto) []const u8 {
-const start = offsets.tokenLocation(tree, func.ast.fn_token);
+const start = offsets.tokenToLoc(tree, func.ast.fn_token);

 const end = if (func.ast.return_type != 0)
-offsets.tokenLocation(tree, ast.lastToken(tree, func.ast.return_type))
+offsets.tokenToLoc(tree, ast.lastToken(tree, func.ast.return_type))
 else
 start;
 return tree.source[start.start..end.end];
@@ -216,8 +216,8 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
 }

 pub fn getVariableSignature(tree: Ast, var_decl: Ast.full.VarDecl) []const u8 {
-const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
+const start = offsets.tokenToIndex(tree, var_decl.ast.mut_token);
-const end = offsets.tokenLocation(tree, ast.lastToken(tree, var_decl.ast.init_node)).end;
+const end = offsets.tokenToLoc(tree, ast.lastToken(tree, var_decl.ast.init_node)).end;
 return tree.source[start..end];
 }

@@ -225,9 +225,9 @@ pub fn getContainerFieldSignature(tree: Ast, field: Ast.full.ContainerField) []c
 if (field.ast.value_expr == 0 and field.ast.type_expr == 0 and field.ast.align_expr == 0) {
 return ""; // TODO display the container's type
 }
-const start = offsets.tokenLocation(tree, field.ast.name_token).start;
+const start = offsets.tokenToIndex(tree, field.ast.name_token);
 const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
-const end = offsets.tokenLocation(tree, ast.lastToken(tree, end_node)).end;
+const end = offsets.tokenToLoc(tree, ast.lastToken(tree, end_node)).end;
 return tree.source[start..end];
 }

@@ -653,7 +653,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 return try resolveTypeOfNodeInternal(store, arena, value, bound_type_params);
 },
 .identifier => {
-const name = tree.getNodeSource(node);
+const name = offsets.nodeToSlice(tree, node);

 if (isTypeIdent(name)) {
 return TypeWithHandle{
@@ -1362,9 +1362,8 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
 }

 fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize) bool {
-const first_token = offsets.tokenLocation(tree, tree.firstToken(node)).start;
+const loc = offsets.nodeToLoc(tree, node);
-const last_token = offsets.tokenLocation(tree, ast.lastToken(tree, node)).end;
+return source_index >= loc.start and source_index <= loc.end;
-return source_index >= first_token and source_index <= last_token;
 }

 pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]const u8 {
@@ -1402,16 +1401,14 @@ pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]con
 return import_str[1 .. import_str.len - 1];
 }

-pub const SourceRange = std.zig.Token.Loc;
-
 pub const PositionContext = union(enum) {
-builtin: SourceRange,
+builtin: offsets.Loc,
 comment,
-import_string_literal: SourceRange,
+import_string_literal: offsets.Loc,
-embedfile_string_literal: SourceRange,
+embedfile_string_literal: offsets.Loc,
-string_literal: SourceRange,
+string_literal: offsets.Loc,
-field_access: SourceRange,
+field_access: offsets.Loc,
-var_access: SourceRange,
+var_access: offsets.Loc,
 global_error_set,
 enum_literal,
 pre_label,
@@ -1419,7 +1416,7 @@ pub const PositionContext = union(enum) {
 other,
 empty,

-pub fn range(self: PositionContext) ?SourceRange {
+pub fn loc(self: PositionContext) ?offsets.Loc {
 return switch (self) {
 .builtin => |r| r,
 .comment => null,
@@ -1450,29 +1447,28 @@ fn peek(allocator: std.mem.Allocator, arr: *std.ArrayListUnmanaged(StackState))
 return &arr.items[arr.items.len - 1];
 }

-fn tokenRangeAppend(prev: SourceRange, token: std.zig.Token) SourceRange {
+fn tokenLocAppend(prev: offsets.Loc, token: std.zig.Token) offsets.Loc {
 return .{
 .start = prev.start,
 .end = token.loc.end,
 };
 }

-const DocumentPosition = offsets.DocumentPosition;
+pub fn getPositionContext(allocator: std.mem.Allocator, document: types.TextDocument, doc_index: usize) !PositionContext {
+const line_loc = offsets.lineLocUntilIndex(document.text, doc_index);
+const line = offsets.locToSlice(document.text, line_loc);

-pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types.TextDocument, doc_position: DocumentPosition) !PositionContext {
+var stack = try std.ArrayListUnmanaged(StackState).initCapacity(allocator, 8);
-const line = doc_position.line;
+defer stack.deinit(allocator);

-const line_mem_start = @ptrToInt(line.ptr) - @ptrToInt(document.mem.ptr);
-var stack = std.ArrayListUnmanaged(StackState){};
-try stack.ensureTotalCapacity(arena.allocator(), 8);

 {
-var held_line = document.borrowNullTerminatedSlice(
+var held_line = document.borrowNullTerminatedSlice(0, line_loc.end);
-line_mem_start,
-line_mem_start + doc_position.line_index,
-);
 defer held_line.release();
-var tokenizer = std.zig.Tokenizer.init(held_line.data());
+var tokenizer: std.zig.Tokenizer = .{
+.buffer = held_line.data(),
+.index = line_loc.start,
+.pending_invalid_token = null,
+};

 while (true) {
 const tok = tokenizer.next();
@@ -1480,11 +1476,11 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types.
 switch (tok.tag) {
 .invalid => {
 // Single '@' do not return a builtin token so we check this on our own.
-if (line[doc_position.line_index - 1] == '@') {
+if (line[line.len - 1] == '@') {
 return PositionContext{
 .builtin = .{
-.start = doc_position.line_index - 1,
+.start = line_loc.end - 1,
-.end = doc_position.line_index,
+.end = line_loc.end,
 },
 };
 }
@@ -1496,7 +1492,7 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types.
 }

 // State changes
-var curr_ctx = try peek(arena.allocator(), &stack);
+var curr_ctx = try peek(allocator, &stack);
 switch (tok.tag) {
 .string_literal, .multiline_string_literal_line => string_lit_block: {
 if (curr_ctx.stack_id == .Paren and stack.items.len >= 2) {
@@ -1539,7 +1535,7 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types.
 .other => {},
 .global_error_set => {},
 else => curr_ctx.ctx = .{
-.field_access = tokenRangeAppend(curr_ctx.ctx.range().?, tok),
+.field_access = tokenLocAppend(curr_ctx.ctx.loc().?, tok),
 },
 },
 .keyword_break, .keyword_continue => curr_ctx.ctx = .pre_label,
@@ -1552,18 +1548,18 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types.
 .field_access => {},
 else => curr_ctx.ctx = .empty,
 },
-.l_paren => try stack.append(arena.allocator(), .{ .ctx = .empty, .stack_id = .Paren }),
+.l_paren => try stack.append(allocator, .{ .ctx = .empty, .stack_id = .Paren }),
-.l_bracket => try stack.append(arena.allocator(), .{ .ctx = .empty, .stack_id = .Bracket }),
+.l_bracket => try stack.append(allocator, .{ .ctx = .empty, .stack_id = .Bracket }),
 .r_paren => {
 _ = stack.pop();
 if (curr_ctx.stack_id != .Paren) {
-(try peek(arena.allocator(), &stack)).ctx = .empty;
+(try peek(allocator, &stack)).ctx = .empty;
 }
 },
 .r_bracket => {
 _ = stack.pop();
 if (curr_ctx.stack_id != .Bracket) {
-(try peek(arena.allocator(), &stack)).ctx = .empty;
+(try peek(allocator, &stack)).ctx = .empty;
 }
 },
 .keyword_error => curr_ctx.ctx = .global_error_set,
|
|||||||
|
|
||||||
switch (curr_ctx.ctx) {
|
switch (curr_ctx.ctx) {
|
||||||
.field_access => |r| curr_ctx.ctx = .{
|
.field_access => |r| curr_ctx.ctx = .{
|
||||||
.field_access = tokenRangeAppend(r, tok),
|
.field_access = tokenLocAppend(r, tok),
|
||||||
},
|
},
|
||||||
else => {},
|
else => {},
|
||||||
}
|
}
|
||||||
@@ -1586,31 +1582,30 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types.
 .label => |filled| {
 // We need to check this because the state could be a filled
 // label if only a space follows it
-const last_char = line[doc_position.line_index - 1];
+if (!filled or line[line.len - 1] != ' ') {
-if (!filled or last_char != ' ') {
 break :block state.ctx;
 }
 },
 else => break :block state.ctx,
 }
 }
-if (doc_position.line_index < line.len) {
-var held_line = document.borrowNullTerminatedSlice(
-line_mem_start + doc_position.line_index,
-line_mem_start + line.len,
-);
-defer held_line.release();

-switch (line[doc_position.line_index]) {
+if (line.len == 0) return .empty;
-'a'...'z', 'A'...'Z', '_', '@' => {},
-else => break :block .empty,
+var held_line = document.borrowNullTerminatedSlice(line_loc.start, line_loc.end);
-}
+defer held_line.release();
-var tokenizer = std.zig.Tokenizer.init(held_line.data());
-const tok = tokenizer.next();
+switch (line[0]) {
-if (tok.tag == .identifier)
+'a'...'z', 'A'...'Z', '_', '@' => {},
-break :block PositionContext{ .var_access = tok.loc };
+else => break :block .empty,
+}
+var tokenizer = std.zig.Tokenizer.init(held_line.data());
+const tok = tokenizer.next();
+if (tok.tag == .identifier) {
+break :block PositionContext{ .var_access = tok.loc };
+} else {
+break :block .empty;
 }
-break :block .empty;
 };
 }

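
The rewritten context detection above only ever re-tokenizes the current line up to the cursor. offsets.lineLocUntilIndex is new in this commit; its name and the call site suggest behaviour like the following (an assumed reimplementation, not the actual offsets.zig code):

    const std = @import("std");

    pub const Loc = struct { start: usize, end: usize };

    // Slice of the line containing `index`, truncated at `index` itself.
    pub fn lineLocUntilIndex(text: []const u8, index: usize) Loc {
        const start = if (std.mem.lastIndexOfScalar(u8, text[0..index], '\n')) |nl| nl + 1 else 0;
        return .{ .start = start, .end = index };
    }

    test "line up to the cursor" {
        const text = "const a = 1;\nconst b = a.";
        const loc = lineLocUntilIndex(text, text.len);
        try std.testing.expectEqualStrings("const b = a.", text[loc.start..loc.end]);
    }

Seeding the tokenizer with .index = line_loc.start (instead of tokenizing a line-local copy) keeps every token's loc an absolute offset into the document, which is what lets the line_mem_start arithmetic be deleted wholesale.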
@@ -1755,11 +1750,6 @@ fn addOutlineNodes(allocator: std.mem.Allocator, tree: Ast, child: Ast.Node.Inde
 }

 const GetDocumentSymbolsContext = struct {
-prev_loc: offsets.TokenLocation = .{
-.line = 0,
-.column = 0,
-.offset = 0,
-},
 symbols: *std.ArrayListUnmanaged(types.DocumentSymbol),
 encoding: offsets.Encoding,
 };
@@ -1769,30 +1759,7 @@ fn getDocumentSymbolsInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast
 if (name.len == 0)
 return;

-const starts = tree.tokens.items(.start);
+const range = offsets.nodeToRange(tree, node, context.encoding);
-const start_loc = context.prev_loc.add(try offsets.tokenRelativeLocation(
-tree,
-context.prev_loc.offset,
-starts[tree.firstToken(node)],
-context.encoding,
-));
-const end_loc = start_loc.add(try offsets.tokenRelativeLocation(
-tree,
-start_loc.offset,
-starts[ast.lastToken(tree, node)],
-context.encoding,
-));
-context.prev_loc = end_loc;
-const range = types.Range{
-.start = .{
-.line = @intCast(i64, start_loc.line),
-.character = @intCast(i64, start_loc.column),
-},
-.end = .{
-.line = @intCast(i64, end_loc.line),
-.character = @intCast(i64, end_loc.column),
-},
-};

 const tags = tree.nodes.items(.tag);
 (try context.symbols.addOne(allocator)).* = .{
@@ -1828,7 +1795,6 @@ fn getDocumentSymbolsInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast
 var children = std.ArrayListUnmanaged(types.DocumentSymbol){};

 var child_context = GetDocumentSymbolsContext{
-.prev_loc = start_loc,
 .symbols = &children,
 .encoding = context.encoding,
 };
@@ -1913,11 +1879,6 @@ pub const DeclWithHandle = struct {
 };
 }

-pub fn location(self: DeclWithHandle, encoding: offsets.Encoding) !offsets.TokenLocation {
-const tree = self.handle.tree;
-return try offsets.tokenRelativeLocation(tree, 0, tree.tokens.items(.start)[self.nameToken()], encoding);
-}
-
 fn isPublic(self: DeclWithHandle) bool {
 return switch (self.decl.*) {
 .ast_node => |node| isNodePublic(self.handle.tree, node),
@@ -2108,7 +2069,7 @@ pub fn iterateSymbolsContainer(store: *DocumentStore, arena: *std.heap.ArenaAllo

 pub fn iterateLabels(handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void {
 for (handle.document_scope.scopes.items) |scope| {
-if (source_index >= scope.range.start and source_index < scope.range.end) {
+if (source_index >= scope.loc.start and source_index < scope.loc.end) {
 var decl_it = scope.decls.iterator();
 while (decl_it.next()) |entry| {
 switch (entry.value_ptr.*) {
@@ -2118,13 +2079,13 @@ pub fn iterateLabels(handle: *DocumentStore.Handle, source_index: usize, comptim
 try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle });
 }
 }
-if (scope.range.start >= source_index) return;
+if (scope.loc.start >= source_index) return;
 }
 }

 fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype, use_trail: *std.ArrayList(Ast.Node.Index)) error{OutOfMemory}!void {
 for (handle.document_scope.scopes.items) |scope| {
-if (source_index >= scope.range.start and source_index <= scope.range.end) {
+if (source_index >= scope.loc.start and source_index <= scope.loc.end) {
 var decl_it = scope.decls.iterator();
 while (decl_it.next()) |entry| {
 if (entry.value_ptr.* == .ast_node and
@ -2159,7 +2120,7 @@ fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAll
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (scope.range.start >= source_index) return;
|
if (scope.loc.start >= source_index) return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2173,13 +2134,13 @@ pub fn innermostBlockScopeIndex(handle: DocumentStore.Handle, source_index: usiz

var current: usize = 0;
for (handle.document_scope.scopes.items[1..]) |*scope, idx| {
if (source_index >= scope.range.start and source_index <= scope.range.end) {
if (source_index >= scope.loc.start and source_index <= scope.loc.end) {
switch (scope.data) {
.container, .function, .block => current = idx + 1,
else => {},
}
}
if (scope.range.start > source_index) break;
if (scope.loc.start > source_index) break;
}
return current;
}
@ -2193,13 +2154,13 @@ pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) Ty
if (handle.document_scope.scopes.items.len == 1) return TypeWithHandle.typeVal(.{ .node = current, .handle = handle });

for (handle.document_scope.scopes.items[1..]) |scope| {
if (source_index >= scope.range.start and source_index <= scope.range.end) {
if (source_index >= scope.loc.start and source_index <= scope.loc.end) {
switch (scope.data) {
.container => |node| current = node,
else => {},
}
}
if (scope.range.start > source_index) break;
if (scope.loc.start > source_index) break;
}
return TypeWithHandle.typeVal(.{ .node = current, .handle = handle });
}
@ -2238,7 +2199,7 @@ fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []co

pub fn lookupLabel(handle: *DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle {
for (handle.document_scope.scopes.items) |scope| {
if (source_index >= scope.range.start and source_index < scope.range.end) {
if (source_index >= scope.loc.start and source_index < scope.loc.end) {
if (scope.decls.getEntry(symbol)) |candidate| {
switch (candidate.value_ptr.*) {
.label_decl => {},
@ -2250,7 +2211,7 @@ pub fn lookupLabel(handle: *DocumentStore.Handle, symbol: []const u8, source_ind
};
}
}
if (scope.range.start > source_index) return null;
if (scope.loc.start > source_index) return null;
}
return null;
}
@ -2261,7 +2222,7 @@ pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator
var curr = innermost_scope_idx;
while (curr >= 0) : (curr -= 1) {
const scope = &handle.document_scope.scopes.items[curr];
if (source_index >= scope.range.start and source_index <= scope.range.end) blk: {
if (source_index >= scope.loc.start and source_index <= scope.loc.end) blk: {
if (scope.decls.getEntry(symbol)) |candidate| {
switch (candidate.value_ptr.*) {
.ast_node => |node| {
@ -2354,13 +2315,13 @@ pub const DocumentScope = struct {
for (self.scopes.items) |scope| {
log.debug(
\\--------------------------
\\Scope {}, range: [{d}, {d})
\\Scope {}, loc: [{d}, {d})
\\ {d} usingnamespaces
\\Decls:
, .{
scope.data,
scope.range.start,
scope.loc.start,
scope.range.end,
scope.loc.end,
scope.uses.len,
});
@ -2398,7 +2359,7 @@ pub const Scope = struct {
other,
};

range: SourceRange,
loc: offsets.Loc,
decls: std.StringHashMapUnmanaged(Declaration) = .{},
tests: std.ArrayListUnmanaged(Ast.Node.Index) = .{},
uses: std.ArrayListUnmanaged(Ast.Node.Index) = .{},
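`offsets.Loc` is an alias of `std.zig.Token.Loc` (defined in src/offsets.zig below), so a scope's extent is now a pair of byte offsets into the source. A minimal sketch of the containment test the scope iterators above rely on; the helper name is hypothetical, not part of this commit:

    // Hypothetical helper mirroring the checks in iterateLabels and
    // innermostBlockScopeIndex: a source index lies inside a scope when it
    // falls within the scope's byte range.
    fn locContains(loc: offsets.Loc, source_index: usize) bool {
        return source_index >= loc.start and source_index <= loc.end;
    }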
@ -2438,16 +2399,6 @@ pub fn makeDocumentScope(allocator: std.mem.Allocator, tree: Ast) !DocumentScope
return document_scope;
}

fn nodeSourceRange(tree: Ast, node: Ast.Node.Index) SourceRange {
const loc_start = offsets.tokenLocation(tree, tree.firstToken(node));
const loc_end = offsets.tokenLocation(tree, ast.lastToken(tree, node));

return SourceRange{
.start = loc_start.start,
.end = loc_end.end,
};
}

const ScopeContext = struct {
scopes: *std.ArrayListUnmanaged(Scope),
enums: *CompletionSet,
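The removed `nodeSourceRange` is subsumed by the new `offsets.nodeToLoc` (defined later in this commit), which spans from the first token's start byte to the last token's end byte without going through line/column locations. A hedged sketch of the replacement pattern used in the hunks below:

    // Before: .range = nodeSourceRange(tree, node_idx),
    // After: a Loc covering the same byte span, computed without two
    // line/column scans per scope.
    const loc = offsets.nodeToLoc(tree, node_idx);
    std.debug.assert(loc.start <= loc.end);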
@ -2469,7 +2420,7 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx:

var scope = try scopes.addOne(allocator);
scope.* = .{
.range = nodeSourceRange(tree, node_idx),
.loc = offsets.nodeToLoc(tree, node_idx),
.data = .{ .container = node_idx },
};
const scope_idx = scopes.items.len - 1;
@ -2592,7 +2543,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
const func = ast.fnProto(tree, node_idx, &buf).?;

try scopes.append(allocator, .{
.range = nodeSourceRange(tree, node_idx),
.loc = offsets.nodeToLoc(tree, node_idx),
.data = .{ .function = node_idx },
});
const scope_idx = scopes.items.len - 1;
@ -2641,9 +2592,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
if (token_tags[first_token] == .identifier) {
const scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, main_tokens[node_idx]).start,
.start = offsets.tokenToIndex(tree, main_tokens[node_idx]),
.end = offsets.tokenLocation(tree, last_token).start,
.end = offsets.tokenToLoc(tree, last_token).start,
},
.data = .other,
};
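Note the asymmetry in the rewritten label scope above: `tokenToIndex` is a cheap array lookup of a token's start byte, while `tokenToLoc(...).start` re-tokenizes before yielding the same value, so the scope's end deliberately stops at the start of `last_token`. A small illustration (signatures from the new src/offsets.zig):

    const start_byte = offsets.tokenToIndex(tree, token); // reads tokens.items(.start)[token]
    const loc = offsets.tokenToLoc(tree, token); // re-tokenizes to find the end
    std.debug.assert(start_byte == loc.start);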
@ -2651,7 +2602,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
}

try scopes.append(allocator, .{
.range = nodeSourceRange(tree, node_idx),
.loc = offsets.nodeToLoc(tree, node_idx),
.data = .{ .container = node_idx },
});
const scope_idx = scopes.items.len - 1;
@ -2685,9 +2636,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
if (if_node.payload_token) |payload| {
var scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, payload).start,
.start = offsets.tokenToIndex(tree, payload),
.end = offsets.tokenLocation(tree, ast.lastToken(tree, if_node.ast.then_expr)).end,
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, if_node.ast.then_expr)).end,
},
.data = .other,
};
@ -2711,9 +2662,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
std.debug.assert(token_tags[err_token] == .identifier);
var scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, err_token).start,
.start = offsets.tokenToIndex(tree, err_token),
.end = offsets.tokenLocation(tree, ast.lastToken(tree, if_node.ast.else_expr)).end,
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, if_node.ast.else_expr)).end,
},
.data = .other,
};
@ -2732,9 +2683,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i

var scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, tree.firstToken(catch_expr)).start,
.start = offsets.tokenToIndex(tree, tree.firstToken(catch_expr)),
.end = offsets.tokenLocation(tree, ast.lastToken(tree, catch_expr)).end,
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, catch_expr)).end,
},
.data = .other,
};
@ -2761,9 +2712,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
std.debug.assert(token_tags[label] == .identifier);
var scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, while_node.ast.while_token).start,
.start = offsets.tokenToIndex(tree, while_node.ast.while_token),
.end = offsets.tokenLocation(tree, ast.lastToken(tree, node_idx)).end,
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, node_idx)).end,
},
.data = .other,
};
@ -2774,9 +2725,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
if (while_node.payload_token) |payload| {
var scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, payload).start,
.start = offsets.tokenToIndex(tree, payload),
.end = offsets.tokenLocation(tree, ast.lastToken(tree, while_node.ast.then_expr)).end,
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, while_node.ast.then_expr)).end,
},
.data = .other,
};
@ -2818,9 +2769,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
std.debug.assert(token_tags[err_token] == .identifier);
var scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, err_token).start,
.start = offsets.tokenToIndex(tree, err_token),
.end = offsets.tokenLocation(tree, ast.lastToken(tree, while_node.ast.else_expr)).end,
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, while_node.ast.else_expr)).end,
},
.data = .other,
};
@ -2848,9 +2799,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
if (switch_case.payload_token) |payload| {
var scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, payload).start,
.start = offsets.tokenToIndex(tree, payload),
.end = offsets.tokenLocation(tree, ast.lastToken(tree, switch_case.ast.target_expr)).end,
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, switch_case.ast.target_expr)).end,
},
.data = .other,
};
@ -3010,9 +2961,9 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
const payload_token = data[node_idx].lhs;
var scope = try scopes.addOne(allocator);
scope.* = .{
.range = .{
.loc = .{
.start = offsets.tokenLocation(tree, payload_token).start,
.start = offsets.tokenToIndex(tree, payload_token),
.end = offsets.tokenLocation(tree, ast.lastToken(tree, expr)).end,
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, expr)).end,
},
.data = .other,
};
12
src/diff.zig
@ -317,14 +317,14 @@ fn char_pos_to_range(
}) {
if (start >= char_pos and start <= char_pos + line.len) {
result_start_pos = .{
.line = @intCast(i64, line_pos),
.line = @intCast(u32, line_pos),
.character = @intCast(i64, start - char_pos),
.character = @intCast(u32, start - char_pos),
};
}
if (end >= char_pos and end <= char_pos + line.len) {
result_end_pos = .{
.line = @intCast(i64, line_pos),
.line = @intCast(u32, line_pos),
.character = @intCast(i64, end - char_pos),
.character = @intCast(u32, end - char_pos),
};
}
}
@ -335,8 +335,8 @@ fn char_pos_to_range(
// string for some reason so clamp it to the string end position
if (result_end_pos == null) {
result_end_pos = types.Position{
.line = @intCast(i64, line_pos),
.line = @intCast(u32, line_pos),
.character = @intCast(i64, char_pos),
.character = @intCast(u32, char_pos),
};
}
@ -2,6 +2,7 @@ const std = @import("std");
const DocumentStore = @import("DocumentStore.zig");
const analysis = @import("analysis.zig");
const types = @import("types.zig");
const offsets = @import("offsets.zig");
const Ast = std.zig.Ast;
const log = std.log.scoped(.inlay_hint);
const ast = @import("ast.zig");
@ -35,16 +36,7 @@ const Builder = struct {
handle: *DocumentStore.Handle,
hints: std.ArrayListUnmanaged(types.InlayHint),
hover_kind: types.MarkupContent.Kind,
encoding: offsets.Encoding,

fn init(allocator: std.mem.Allocator, config: *const Config, handle: *DocumentStore.Handle, hover_kind: types.MarkupContent.Kind) Builder {
return Builder{
.allocator = allocator,
.config = config,
.handle = handle,
.hints = std.ArrayListUnmanaged(types.InlayHint){},
.hover_kind = hover_kind,
};
}

fn deinit(self: *Builder) void {
for (self.hints.items) |hint| {
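With `init` gone, `Builder` is populated with a plain struct literal; since the new `encoding` field has no default value, the compiler forces every call site to supply it. This is the construction `writeRangeInlayHint` uses further down:

    var builder: Builder = .{
        .allocator = arena.child_allocator,
        .config = config,
        .handle = handle,
        .hints = .{},
        .hover_kind = hover_kind,
        .encoding = encoding,
    };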
@ -53,7 +45,7 @@ const Builder = struct {
self.hints.deinit(self.allocator);
}

fn appendParameterHint(self: *Builder, position: Ast.Location, label: []const u8, tooltip: []const u8, tooltip_noalias: bool, tooltip_comptime: bool) !void {
fn appendParameterHint(self: *Builder, position: types.Position, label: []const u8, tooltip: []const u8, tooltip_noalias: bool, tooltip_comptime: bool) !void {
// TODO allocation could be avoided by extending InlayHint.jsonStringify
// adding tooltip_noalias & tooltip_comptime to InlayHint should be enough
const tooltip_text = blk: {
@ -68,10 +60,7 @@ const Builder = struct {
};

try self.hints.append(self.allocator, .{
.position = .{
.line = @intCast(i64, position.line),
.character = @intCast(i64, position.column),
},
.position = position,
.label = label,
.kind = types.InlayHintKind.Parameter,
.tooltip = .{
@ -110,7 +99,7 @@ fn writeCallHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *Doc
}

while (ast.nextFnParam(&it)) |param| : (i += 1) {
if (param.name_token == null) continue;
const name_token = param.name_token orelse continue;
if (i >= call.ast.params.len) break;

const token_tags = decl_tree.tokens.items(.tag);
@ -121,11 +110,11 @@ fn writeCallHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *Doc
const tooltip = if (param.anytype_ellipsis3) |token|
if (token_tags[token] == .keyword_anytype) "anytype" else ""
else
decl_tree.getNodeSource(param.type_expr);
offsets.nodeToSlice(decl_tree, param.type_expr);

try builder.appendParameterHint(
tree.tokenLocation(0, tree.firstToken(call.ast.params[i])),
offsets.tokenToPosition(tree, tree.firstToken(call.ast.params[i]), builder.encoding),
decl_tree.tokenSlice(param.name_token.?),
decl_tree.tokenSlice(name_token),
tooltip,
no_alias,
comp_time,
@ -165,7 +154,7 @@ fn writeBuiltinHint(builder: *Builder, parameters: []const Ast.Node.Index, argum
}

try builder.appendParameterHint(
tree.tokenLocation(0, tree.firstToken(parameters[i])),
offsets.tokenToPosition(tree, tree.firstToken(parameters[i]), builder.encoding),
label orelse "",
std.mem.trim(u8, type_expr, " \t\n"),
no_alias,
@ -203,23 +192,16 @@ fn writeCallNodeHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
const rhsToken = node_data[call.ast.fn_expr].rhs;
std.debug.assert(token_tags[rhsToken] == .identifier);

const lhsLocation = tree.tokenLocation(0, lhsToken);
const start = offsets.tokenToIndex(tree, lhsToken);
const rhsLocation = tree.tokenLocation(0, rhsToken);
const rhs_loc = offsets.tokenToLoc(tree, rhsToken);

const absolute_index = rhsLocation.line_start + rhsLocation.column;
var held_range = handle.document.borrowNullTerminatedSlice(start, rhs_loc.end);

const range = .{
.start = lhsLocation.line_start + lhsLocation.column,
.end = rhsLocation.line_start + rhsLocation.column + tree.tokenSlice(rhsToken).len,
};

var held_range = handle.document.borrowNullTerminatedSlice(range.start, range.end);
var tokenizer = std.zig.Tokenizer.init(held_range.data());

// note: we have the ast node, traversing it would probably yield better results
// than trying to re-tokenize and re-parse it
errdefer held_range.release();
if (try analysis.getFieldAccessType(store, arena, handle, absolute_index, &tokenizer)) |result| {
if (try analysis.getFieldAccessType(store, arena, handle, rhs_loc.end, &tokenizer)) |result| {
held_range.release();
const container_handle = result.unwrapped orelse result.original;
switch (container_handle.type.data) {
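The rewrite above collapses the old line/column arithmetic into two byte offsets: the start of the `lhs` token and the end of the `rhs` token's `Loc`. The slice handed to the tokenizer is simply:

    const start = offsets.tokenToIndex(tree, lhsToken); // first byte of `lhs`
    const rhs_loc = offsets.tokenToLoc(tree, rhsToken); // byte range of `rhs`
    // spans `lhs` through `rhs` inclusive, no line math required:
    var held_range = handle.document.borrowNullTerminatedSlice(start, rhs_loc.end);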
@ -676,8 +658,23 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
/// only hints in the given range are created
/// Caller owns returned memory.
/// `InlayHint.tooltip.value` has to be deallocated separately
pub fn writeRangeInlayHint(arena: *std.heap.ArenaAllocator, config: *const Config, store: *DocumentStore, handle: *DocumentStore.Handle, range: types.Range, hover_kind: types.MarkupContent.Kind) error{OutOfMemory}![]types.InlayHint {
var builder = Builder.init(arena.child_allocator, config, handle, hover_kind);
pub fn writeRangeInlayHint(
arena: *std.heap.ArenaAllocator,
config: *const Config,
store: *DocumentStore,
handle: *DocumentStore.Handle,
range: types.Range,
hover_kind: types.MarkupContent.Kind,
encoding: offsets.Encoding,
) error{OutOfMemory}![]types.InlayHint {
var builder: Builder = .{
.allocator = arena.child_allocator,
.config = config,
.handle = handle,
.hints = .{},
.hover_kind = hover_kind,
.encoding = encoding,
};
errdefer builder.deinit();

var buf: [2]Ast.Node.Index = undefined;
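A hypothetical call site for the widened signature; the server threads its negotiated offset encoding through explicitly (the argument values here are illustrative, not from this commit):

    const hints = try writeRangeInlayHint(
        &arena,
        &config,
        &document_store,
        handle,
        visible_range,
        .Markdown,
        server.offset_encoding,
    );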
492
src/offsets.zig
@ -1,179 +1,56 @@
const std = @import("std");
const types = @import("types.zig");
const ast = @import("ast.zig");
const Ast = std.zig.Ast;

pub const Encoding = enum {
utf8,
utf16,
};
pub const Encoding = types.PositionEncodingKind;

pub const DocumentPosition = struct {
line: []const u8,
line_index: usize,
absolute_index: usize,
};
pub const Loc = std.zig.Token.Loc;

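Two aliases carry most of this rewrite: `Encoding` now comes from the LSP types (and gains utf-32), and `Loc` is a plain pair of byte offsets. Everything below converts between three coordinate systems: byte index (`usize`), `Loc`, and `types.Position` in a chosen encoding. A test-style illustration of the mapping (not part of the commit):

    test "byte index to Position" {
        const text = "hello\nworld";
        // byte index 8 is the 'r' in "world": line 1, character 2 (ASCII only,
        // so the same in every encoding)
        const pos = indexToPosition(text, 8, .utf8);
        try std.testing.expectEqual(@as(u32, 1), pos.line);
        try std.testing.expectEqual(@as(u32, 2), pos.character);
    }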
pub fn documentPosition(doc: types.TextDocument, position: types.Position, encoding: Encoding) !DocumentPosition {
var split_iterator = std.mem.split(u8, doc.text, "\n");

var line_idx: i64 = 0;
var line: []const u8 = "";
while (line_idx < position.line) : (line_idx += 1) {
line = split_iterator.next() orelse return error.InvalidParams;
}

const line_start_idx = split_iterator.index.?;
line = split_iterator.next() orelse return error.InvalidParams;

if (encoding == .utf8) {
const index = @intCast(i64, line_start_idx) + position.character;
if (index < 0 or index > @intCast(i64, doc.text.len)) {
return error.InvalidParams;
}
return DocumentPosition{
.line = line,
.absolute_index = @intCast(usize, index),
.line_index = @intCast(usize, position.character),
};
} else {
const utf8 = doc.text[line_start_idx..];
var utf8_idx: usize = 0;
var utf16_idx: usize = 0;
while (utf16_idx < position.character) {
if (utf8_idx > utf8.len) {
return error.InvalidParams;
}

const n = try std.unicode.utf8ByteSequenceLength(utf8[utf8_idx]);
const next_utf8_idx = utf8_idx + n;
const codepoint = try std.unicode.utf8Decode(utf8[utf8_idx..next_utf8_idx]);
if (codepoint < 0x10000) {
utf16_idx += 1;
} else {
utf16_idx += 2;
}
utf8_idx = next_utf8_idx;
}
return DocumentPosition{
.line = line,
.absolute_index = line_start_idx + utf8_idx,
.line_index = utf8_idx,
};
}
}

pub fn lineSectionLength(tree: Ast, start_index: usize, end_index: usize, encoding: Encoding) !usize {
const source = tree.source[start_index..];
std.debug.assert(end_index >= start_index and source.len >= end_index - start_index);
if (encoding == .utf8) {
return end_index - start_index;
}

var result: usize = 0;
var i: usize = 0;
while (i + start_index < end_index) {
std.debug.assert(source[i] != '\n');

const n = try std.unicode.utf8ByteSequenceLength(source[i]);
if (i + n >= source.len)
return error.CodepointTooLong;

const codepoint = try std.unicode.utf8Decode(source[i .. i + n]);

result += 1 + @as(usize, @boolToInt(codepoint >= 0x10000));
i += n;
}
return result;
}

pub const TokenLocation = struct {
line: usize,
column: usize,
offset: usize,

pub fn add(lhs: TokenLocation, rhs: TokenLocation) TokenLocation {
return .{
.line = lhs.line + rhs.line,
.column = if (rhs.line == 0)
lhs.column + rhs.column
else
rhs.column,
.offset = rhs.offset,
};
}
};

pub fn tokenRelativeLocation(tree: Ast, start_index: usize, token_start: usize, encoding: Encoding) !TokenLocation {
if (token_start < start_index)
return error.InvalidParams;

var loc = TokenLocation{
.line = 0,
.column = 0,
.offset = 0,
};
pub fn indexToPosition(text: []const u8, index: usize, encoding: Encoding) types.Position {
const last_line_start = if (std.mem.lastIndexOf(u8, text[0..index], "\n")) |line| line + 1 else 0;
const line_count = std.mem.count(u8, text[0..last_line_start], "\n");

return .{
.line = @intCast(u32, line_count),
.character = @intCast(u32, countCodeUnits(text[last_line_start..index], encoding)),
};
}

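The new `indexToPosition` derives the line by counting newlines before the current line's start and the character by counting code units from the line start to the index, so multi-byte characters only cost a scan of one line prefix. A worked example under UTF-16 (illustrative):

    test "indexToPosition counts code units, not bytes" {
        // '¶' is 2 UTF-8 bytes but 1 UTF-16 code unit, so byte index 3 ('b')
        // maps to character 2 in UTF-16 (and character 3 in UTF-8).
        const text = "a¶b";
        const pos = indexToPosition(text, 3, .utf16);
        try std.testing.expectEqual(@as(u32, 0), pos.line);
        try std.testing.expectEqual(@as(u32, 2), pos.character);
    }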
const source = tree.source[start_index..];
var i: usize = 0;
while (i + start_index < token_start) {
const c = source[i];
if (c == '\n') {
loc.line += 1;
loc.column = 0;
i += 1;
} else {
if (encoding == .utf16) {
const n = try std.unicode.utf8ByteSequenceLength(c);
if (i + n >= source.len)
return error.CodepointTooLong;

const codepoint = try std.unicode.utf8Decode(source[i .. i + n]);
loc.column += 1 + @as(usize, @boolToInt(codepoint >= 0x10000));
i += n;
} else {
loc.column += 1;
i += 1;
}
}
}
loc.offset = i + start_index;
return loc;
}
pub fn positionToIndex(text: []const u8, position: types.Position, encoding: Encoding) usize {
var line: u32 = 0;
var line_start_index: usize = 0;
for (text) |c, i| {
if (line == position.line) break;
if (c == '\n') {
line += 1;
line_start_index = i + 1;
}
}
std.debug.assert(line == position.line);

const line_text = std.mem.sliceTo(text[line_start_index..], '\n');
const line_byte_length = getNCodeUnitByteCount(line_text, position.character, encoding);

return line_start_index + line_byte_length;
}

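`positionToIndex` is the inverse on valid positions: it scans for the line start, then `getNCodeUnitByteCount` (defined below) turns the character offset back into bytes. A round-trip sketch (illustrative):

    test "position <-> index round-trip" {
        const text = "hello\nfrom\nzig";
        const index: usize = 8; // the 'o' in "from"
        const pos = indexToPosition(text, index, .utf16);
        try std.testing.expectEqual(index, positionToIndex(text, pos, .utf16));
    }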
/// Asserts the token is comprised of valid utf8
pub fn tokenLength(tree: Ast, token: Ast.TokenIndex, encoding: Encoding) usize {
return locationLength(tokenLocation(tree, token), tree, encoding);
pub fn tokenToIndex(tree: Ast, token_index: Ast.TokenIndex) usize {
return tree.tokens.items(.start)[token_index];
}

/// Token location inside source
pub const Loc = struct {
start: usize,
end: usize,
};

pub fn locationLength(loc: Loc, tree: Ast, encoding: Encoding) usize {
if (encoding == .utf8)
return loc.end - loc.start;

var i: usize = loc.start;
var utf16_len: usize = 0;
while (i < loc.end) {
const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable;
const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable;
if (codepoint < 0x10000) {
utf16_len += 1;
} else {
utf16_len += 2;
}
i += n;
}
return utf16_len;
}

pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
pub fn tokenToLoc(tree: Ast, token_index: Ast.TokenIndex) Loc {
const start = tree.tokens.items(.start)[token_index];
const tag = tree.tokens.items(.tag)[token_index];

// Many tokens can be determined entirely by their tag.
if (tag.lexeme()) |lexeme| {
return .{
.start = start,
.end = start + lexeme.len,
};
}

// For some tokens, re-tokenization is needed to find the end.
var tokenizer: std.zig.Tokenizer = .{
.buffer = tree.source,
@ -181,100 +58,247 @@ pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
.pending_invalid_token = null,
};

// Maybe combine multi-line tokens?
const token = tokenizer.next();
// A failure would indicate a corrupted tree.source
std.debug.assert(token.tag == tag);
return token.loc;
}

pub fn tokenToSlice(tree: Ast, token_index: Ast.TokenIndex) []const u8 {
return locToSlice(tree.source, tokenToLoc(tree, token_index));
}

pub fn tokenToPosition(tree: Ast, token_index: Ast.TokenIndex, encoding: Encoding) types.Position {
const start = tokenToIndex(tree, token_index);
return indexToPosition(tree.source, start, encoding);
}

pub fn tokenToRange(tree: Ast, token_index: Ast.TokenIndex, encoding: Encoding) types.Range {
const start = tokenToPosition(tree, token_index, encoding);
const loc = tokenToLoc(tree, token_index);

return .{
.start = start,
.end = advancePosition(tree.source, start, loc.start, loc.end, encoding),
};
}

pub fn locLength(text: []const u8, loc: Loc, encoding: Encoding) usize {
return countCodeUnits(text[loc.start..loc.end], encoding);
}

pub fn tokenLength(tree: Ast, token_index: Ast.TokenIndex, encoding: Encoding) usize {
const loc = tokenToLoc(tree, token_index);
return locLength(tree.source, loc, encoding);
}

pub fn rangeLength(text: []const u8, range: types.Range, encoding: Encoding) usize {
const loc: Loc = .{
.start = positionToIndex(text, range.start, encoding),
.end = positionToIndex(text, range.end, encoding),
};
return locLength(text, loc, encoding);
}

pub fn tokenIndexLength(text: [:0]const u8, index: usize, encoding: Encoding) usize {
const loc = tokenIndexToLoc(text, index);
return locLength(text, loc, encoding);
}

pub fn tokenIndexToLoc(text: [:0]const u8, index: usize) Loc {
var tokenizer: std.zig.Tokenizer = .{
.buffer = text,
.index = index,
.pending_invalid_token = null,
};

const token = tokenizer.next();
// HACK, should return error.UnextectedToken
if (token.tag != tag) return .{ .start = 0, .end = 0 }; //std.debug.assert(token.tag == tag);
return .{ .start = token.loc.start, .end = token.loc.end };
}

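`tokenIndexToLoc` re-tokenizes starting at an arbitrary byte index and returns the `Loc` of the first token it finds, which is what the position-based helpers below build on. Illustrative use, mirroring the new tests at the bottom of this commit:

    test "tokenIndexToLoc skips leading whitespace" {
        const loc = tokenIndexToLoc(" bar ", 0);
        try std.testing.expectEqual(@as(usize, 1), loc.start);
        try std.testing.expectEqual(@as(usize, 4), loc.end);
    }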
/// returns the range of the given token at `token_index`
pub fn tokenToRange(tree: Ast, token_index: Ast.TokenIndex, encoding: Encoding) !types.Range {
const loc = try tokenRelativeLocation(tree, 0, tree.tokens.items(.start)[token_index], encoding);
const length = tokenLength(tree, token_index, encoding);

return types.Range{
.start = .{
.line = @intCast(i64, loc.line),
.character = @intCast(i64, loc.column),
},
.end = .{
.line = @intCast(i64, loc.line),
.character = @intCast(i64, loc.column + length),
},
pub fn tokenPositionToLoc(text: [:0]const u8, position: types.Position, encoding: Encoding) Loc {
const index = positionToIndex(text, position, encoding);
return tokenIndexToLoc(text, index);
}

pub fn tokenIndexToSlice(text: [:0]const u8, index: usize) []const u8 {
return locToSlice(text, tokenIndexToLoc(text, index));
}

pub fn tokenPositionToSlice(text: [:0]const u8, position: types.Position) []const u8 {
return locToSlice(text, tokenPositionToLoc(text, position));
}

pub fn tokenIndexToRange(text: [:0]const u8, index: usize, encoding: Encoding) types.Range {
const start = indexToPosition(text, index, encoding);
const loc = tokenIndexToLoc(text, index);

return .{
.start = start,
.end = advancePosition(text, start, loc.start, loc.end, encoding),
};
}

/// returns the range of a token pointed to by `position`
pub fn tokenPositionToRange(tree: Ast, position: types.Position, encoding: Encoding) !types.Range {
const doc = .{
.uri = undefined,
.text = tree.source,
.mem = undefined,
};
const document_position = try documentPosition(doc, position, encoding);

var tokenizer: std.zig.Tokenizer = .{
.buffer = tree.source,
.index = document_position.absolute_index,
.pending_invalid_token = null,
};
const token = tokenizer.next();
const loc: Loc = .{ .start = token.loc.start, .end = token.loc.end };
const length = locationLength(loc, tree, encoding);

return types.Range{
.start = position,
.end = .{
.line = position.line,
.character = position.character + @intCast(i64, length),
},
pub fn tokenPositionToRange(text: [:0]const u8, position: types.Position, encoding: Encoding) types.Range {
const index = positionToIndex(text, position, encoding);
const loc = tokenIndexToLoc(text, index);

return .{
.start = position,
.end = advancePosition(text, position, loc.start, loc.end, encoding),
};
}

pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range {
var line_idx: i64 = 0;
var curr_line: []const u8 = doc.text;

var split_iterator = std.mem.split(u8, doc.text, "\n");
while (split_iterator.next()) |line| : (line_idx += 1) {
curr_line = line;
pub fn locToSlice(text: []const u8, loc: Loc) []const u8 {
return text[loc.start..loc.end];
}

pub fn locToRange(text: []const u8, loc: Loc, encoding: Encoding) types.Range {
std.debug.assert(loc.start <= loc.end and loc.end <= text.len);
const start = indexToPosition(text, loc.start, encoding);
return .{
.start = start,
.end = advancePosition(text, start, loc.start, loc.end, encoding),
};
}

pub fn nodeToLoc(tree: Ast, node: Ast.Node.Index) Loc {
return .{ .start = tokenToIndex(tree, tree.firstToken(node)), .end = tokenToLoc(tree, ast.lastToken(tree, node)).end };
}

pub fn nodeToSlice(tree: Ast, node: Ast.Node.Index) []const u8 {
return locToSlice(tree.source, nodeToLoc(tree, node));
}

pub fn nodeToRange(tree: Ast, node: Ast.Node.Index, encoding: Encoding) types.Range {
return locToRange(tree.source, nodeToLoc(tree, node), encoding);
}

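These node helpers replace scattered `tree.getNodeSource` calls elsewhere in this commit (see src/references.zig and src/semantic_tokens.zig below): `nodeToSlice` is just the byte slice of `nodeToLoc`, and `nodeToRange` lifts the same span into an LSP range. Sketch, assuming a parsed `tree` and a valid `node`:

    const name = offsets.nodeToSlice(tree, node); // source bytes spanned by the node
    const range = offsets.nodeToRange(tree, node, encoding); // same span as a types.Range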
pub fn lineLocAtIndex(text: []const u8, index: usize) Loc {
return .{
.start = if (std.mem.lastIndexOfScalar(u8, text[0..index], '\n')) |idx| idx + 1 else 0,
.end = std.mem.indexOfScalarPos(u8, text, index, '\n') orelse text.len,
};
}

pub fn lineSliceAtIndex(text: []const u8, index: usize) []const u8 {
return locToSlice(text, lineLocAtIndex(text, index));
}

pub fn lineLocAtPosition(text: []const u8, position: types.Position, encoding: Encoding) Loc {
return lineLocAtIndex(text, positionToIndex(text, position, encoding));
}

pub fn lineSliceAtPosition(text: []const u8, position: types.Position, encoding: Encoding) []const u8 {
return locToSlice(text, lineLocAtPosition(text, position, encoding));
}

pub fn lineLocUntilIndex(text: []const u8, index: usize) Loc {
return .{
.start = if (std.mem.lastIndexOfScalar(u8, text[0..index], '\n')) |idx| idx + 1 else 0,
.end = index,
};
}

pub fn lineSliceUntilIndex(text: []const u8, index: usize) []const u8 {
return locToSlice(text, lineLocUntilIndex(text, index));
}

pub fn lineLocUntilPosition(text: []const u8, position: types.Position, encoding: Encoding) Loc {
return lineLocUntilIndex(text, positionToIndex(text, position, encoding));
}

pub fn lineSliceUntilPosition(text: []const u8, position: types.Position, encoding: Encoding) []const u8 {
return locToSlice(text, lineLocUntilPosition(text, position, encoding));
}

pub fn convertPositionEncoding(text: []const u8, position: types.Position, from_encoding: Encoding, to_encoding: Encoding) types.Position {
if (from_encoding == to_encoding) return position;

const line_loc = lineLocUntilPosition(text, position, from_encoding);

return .{
.line = position.line,
.character = @intCast(u32, locLength(text, line_loc, to_encoding)),
};
}

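`convertPositionEncoding` re-measures only the prefix of a single line (`lineLocUntilPosition`), so converting a client position is cheap even in large files. A worked example ('¶' is 2 UTF-8 bytes but 1 UTF-16 code unit; illustrative test):

    test "convertPositionEncoding" {
        const text = "¶¶x";
        const utf16_pos: types.Position = .{ .line = 0, .character = 2 }; // just before 'x'
        const utf8_pos = convertPositionEncoding(text, utf16_pos, .utf16, .utf8);
        try std.testing.expectEqual(@as(u32, 4), utf8_pos.character);
    }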
}

if (line_idx > 0) line_idx -= 1;

if (encoding == .utf8) {
return types.Range{
.start = .{
.line = 0,
.character = 0,
},
.end = .{
.line = line_idx,
.character = @intCast(i64, curr_line.len),
},
};
} else {
var utf16_len: usize = 0;
var line_utf8_idx: usize = 0;
while (line_utf8_idx < curr_line.len) {
const n = try std.unicode.utf8ByteSequenceLength(curr_line[line_utf8_idx]);
const codepoint = try std.unicode.utf8Decode(curr_line[line_utf8_idx .. line_utf8_idx + n]);
if (codepoint < 0x10000) {
utf16_len += 1;
} else {
utf16_len += 2;
}
line_utf8_idx += n;
}
return types.Range{
.start = .{
.line = 0,
.character = 0,
},
.end = .{
.line = line_idx,
.character = @intCast(i64, utf16_len),
},
};
}
}
pub fn convertRangeEncoding(text: []const u8, range: types.Range, from_encoding: Encoding, to_encoding: Encoding) types.Range {
if (from_encoding == to_encoding) return range;
return .{
.start = convertPositionEncoding(text, range.start, from_encoding, to_encoding),
.end = convertPositionEncoding(text, range.end, from_encoding, to_encoding),
};
}

// Helper functions

/// advance `position` which starts at `from_index` to `to_index` accounting for line breaks
pub fn advancePosition(text: []const u8, position: types.Position, from_index: usize, to_index: usize, encoding: Encoding) types.Position {
var line = position.line;

for (text[from_index..to_index]) |c| {
if (c == '\n') {
line += 1;
}
}

const line_loc = lineLocUntilIndex(text, to_index);

return .{
.line = line,
.character = @intCast(u32, locLength(text, line_loc, encoding)),
};
}

/// returns the number of code units in `text`
pub fn countCodeUnits(text: []const u8, encoding: Encoding) usize {
switch (encoding) {
.utf8 => return text.len,
.utf16 => {
var iter: std.unicode.Utf8Iterator = .{ .bytes = text, .i = 0 };

var utf16_len: usize = 0;
while (iter.nextCodepoint()) |codepoint| {
if (codepoint < 0x10000) {
utf16_len += 1;
} else {
utf16_len += 2;
}
}
return utf16_len;
},
.utf32 => return std.unicode.utf8CountCodepoints(text) catch unreachable,
}
}

/// returns the number of (utf-8 code units / bytes) that represent `n` code units in `text`
pub fn getNCodeUnitByteCount(text: []const u8, n: usize, encoding: Encoding) usize {
switch (encoding) {
.utf8 => return n,
.utf16 => {
if (n == 0) return 0;
var iter: std.unicode.Utf8Iterator = .{ .bytes = text, .i = 0 };

var utf16_len: usize = 0;
while (iter.nextCodepoint()) |codepoint| {
if (codepoint < 0x10000) {
utf16_len += 1;
} else {
utf16_len += 2;
}
if (utf16_len >= n) break;
}
return iter.i;
},
.utf32 => {
var i: usize = 0;
var count: usize = 0;
while (count != n) : (count += 1) {
i += std.unicode.utf8ByteSequenceLength(text[i]) catch unreachable;
}
return i;
},
}
}
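The two counting helpers are duals: `countCodeUnits` maps a byte slice to its length in the target encoding, and `getNCodeUnitByteCount` maps a code-unit count back to a byte count. For '🠁' (U+1F801: 4 UTF-8 bytes, one UTF-16 surrogate pair, one codepoint):

    test "code unit counting" {
        const text = "🠁";
        try std.testing.expectEqual(@as(usize, 4), countCodeUnits(text, .utf8));
        try std.testing.expectEqual(@as(usize, 2), countCodeUnits(text, .utf16));
        try std.testing.expectEqual(@as(usize, 1), countCodeUnits(text, .utf32));
        try std.testing.expectEqual(@as(usize, 4), getNCodeUnitByteCount(text, 2, .utf16));
    }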
@ -10,7 +10,7 @@ const ast = @import("ast.zig");
fn tokenReference(handle: *DocumentStore.Handle, tok: Ast.TokenIndex, encoding: offsets.Encoding, context: anytype, comptime handler: anytype) !void {
try handler(context, types.Location{
.uri = handle.uri(),
.range = offsets.tokenToRange(handle.tree, tok,encoding) catch return,
.range = offsets.tokenToRange(handle.tree, tok, encoding),
});
}
@ -111,7 +111,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
}
},
.identifier => {
if (try analysis.lookupSymbolGlobal(store, arena, handle, tree.getNodeSource(node), starts[main_tokens[node]])) |child| {
if (try analysis.lookupSymbolGlobal(store, arena, handle, offsets.nodeToSlice(tree, node), starts[main_tokens[node]])) |child| {
if (std.meta.eql(decl, child)) {
try tokenReference(handle, main_tokens[node], encoding, context, handler);
}
@ -168,7 +168,9 @@ pub const Initialize = struct {
},
documentHighlight: Exists,
},
offsetEncoding: MaybeStringArray,
general: ?struct {
positionEncodings: MaybeStringArray,
},
};

params: struct {
@ -173,22 +173,18 @@ const Builder = struct {

while (i < to - 1 and source[i] != '\n') : (i += 1) {}

const length = try offsets.lineSectionLength(self.handle.tree, comment_start, i, self.encoding);
const length = offsets.locLength(self.handle.tree.source, .{ .start = comment_start, .end = i }, self.encoding);
try self.addDirect(TokenType.comment, mods, comment_start, length);
}
}

fn addDirect(self: *Builder, tok_type: TokenType, tok_mod: TokenModifiers, start: usize, length: usize) !void {
const delta = offsets.tokenRelativeLocation(
self.handle.tree,
self.previous_position,
start,
self.encoding,
) catch return;
const text = self.handle.tree.source[self.previous_position..start];
const delta = offsets.indexToPosition(text, text.len, self.encoding);

try self.arr.appendSlice(self.allocator, &.{
@truncate(u32, delta.line),
@truncate(u32, delta.column),
@truncate(u32, delta.character),
@truncate(u32, length),
@enumToInt(tok_type),
tok_mod.toInt(),
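The new `addDirect` computes the LSP semantic-token delta with a neat trick: it slices the source between the previously emitted token start and the new one, then runs `indexToPosition` on that slice with `index == text.len`. The resulting `.line` is the number of newlines in the gap (the line delta), and `.character` is the code-unit count after the last newline, which is exactly the start character the protocol expects when a line was crossed, or the character delta otherwise. Sketch:

    // Gap between the previous token start and this one:
    const text = self.handle.tree.source[self.previous_position..start];
    // .line = '\n' count in the gap, .character = units after the last '\n'.
    const delta = offsets.indexToPosition(text, text.len, self.encoding);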
@ -406,7 +402,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
try writeToken(builder, node_data[node].rhs, .errorTag);
},
.identifier => {
const name = tree.getNodeSource(node);
const name = offsets.nodeToSlice(tree, node);

if (std.mem.eql(u8, name, "undefined")) {
return try writeToken(builder, main_token, .keywordLiteral);
@ -5,8 +5,8 @@ const string = []const u8;
// https://microsoft.github.io/language-server-protocol/specifications/specification-3-16/

pub const Position = struct {
line: i64,
line: u32,
character: i64,
character: u32,
};

pub const Range = struct {
@ -372,9 +372,24 @@ pub const InlayHintKind = enum(i64) {
}
};

pub const PositionEncodingKind = enum {
utf8,
utf16,
utf32,

pub fn jsonStringify(value: PositionEncodingKind, options: std.json.StringifyOptions, out_stream: anytype) !void {
const str = switch (value) {
.utf8 => "utf-8",
.utf16 => "utf-16",
.utf32 => "utf-32",
};
try std.json.stringify(str, options, out_stream);
}
};

// Only includes options we set in our initialize result.
const InitializeResult = struct {
offsetEncoding: string,
offsetEncoding: PositionEncodingKind,
capabilities: struct {
signatureHelpProvider: struct {
triggerCharacters: []const string,
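Because `PositionEncodingKind` implements `jsonStringify`, `InitializeResult.offsetEncoding` can hold the enum directly and still serialize to the LSP wire strings. Illustrative check:

    test "PositionEncodingKind stringifies to LSP names" {
        var buf = std.ArrayList(u8).init(std.testing.allocator);
        defer buf.deinit();
        try std.json.stringify(PositionEncodingKind.utf16, .{}, buf.writer());
        try std.testing.expectEqualStrings("\"utf-16\"", buf.items);
    }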
@ -5,6 +5,7 @@ const helper = @import("helper");
const Context = @import("context").Context;

const types = zls.types;
const offsets = zls.offsets;
const requests = zls.requests;

const allocator: std.mem.Allocator = std.testing.allocator;
@ -89,7 +90,7 @@ fn testInlayHints(source: []const u8) !void {

const range = types.Range{
.start = types.Position{ .line = 0, .character = 0 },
.end = sourceIndexPosition(phr.source, phr.source.len),
.end = offsets.indexToPosition(phr.source, phr.source.len, .utf16),
};

const method = try std.json.stringifyAlloc(allocator, .{
@ -130,7 +131,7 @@ fn testInlayHints(source: []const u8) !void {
outer: for (phr.placeholder_locations) |loc, i| {
const name = phr.placeholders[i].placeholderSlice(source);

const position = sourceIndexPosition(phr.source, loc);
const position = offsets.indexToPosition(phr.source, loc, .utf16);

for (hints) |hint| {
if (position.line != hint.position.line or position.character != hint.position.character) continue;
@ -146,14 +147,3 @@ fn testInlayHints(source: []const u8) !void {
return error.PlaceholderNotFound;
}
}

fn sourceIndexPosition(source: []const u8, index: usize) types.Position {
const line = std.mem.count(u8, source[0..index], &.{'\n'});
const last_line_index = if (std.mem.lastIndexOfScalar(u8, source[0..index], '\n')) |idx| idx + 1 else 0;
const last_line_character = index - last_line_index;

return types.Position{
.line = @intCast(i64, line),
.character = @intCast(i64, last_line_character),
};
}
@ -1,5 +1,6 @@
comptime {
_ = @import("sessions.zig");
_ = @import("utility/offsets.zig");
_ = @import("utility/position_context.zig");
_ = @import("utility/uri.zig");
169
tests/utility/offsets.zig
Normal file
@ -0,0 +1,169 @@
const std = @import("std");
const zls = @import("zls");

const types = zls.types;
const offsets = zls.offsets;

test "offsets - index <-> Position" {
try testIndexPosition("", 0, 0, .{ 0, 0, 0 });

try testIndexPosition("hello from zig", 10, 0, .{ 10, 10, 10 });

try testIndexPosition("\n", 0, 0, .{ 0, 0, 0 });
try testIndexPosition("\n", 1, 1, .{ 0, 0, 0 });

try testIndexPosition("hello\nfrom\nzig\n", 5, 0, .{ 5, 5, 5 });
try testIndexPosition("hello\nfrom\nzig\n", 6, 1, .{ 0, 0, 0 });
try testIndexPosition("hello\nfrom\nzig\n", 8, 1, .{ 2, 2, 2 });
try testIndexPosition("\nhello\nfrom\nzig", 15, 3, .{ 3, 3, 3 });

try testIndexPosition("a¶↉🠁", 10, 0, .{ 10, 5, 4 });
try testIndexPosition("🇺🇸 🇩🇪", 17, 0, .{ 17, 9, 5 });

try testIndexPosition("a¶↉🠁\na¶↉🠁", 10, 0, .{ 10, 5, 4 });
try testIndexPosition("a¶↉🠁\na¶↉🠁", 11, 1, .{ 0, 0, 0 });
try testIndexPosition("a¶↉🠁\na¶↉🠁", 21, 1, .{ 10, 5, 4 });

try testIndexPosition("\na¶↉🠁", 4, 1, .{ 3, 2, 2 });
try testIndexPosition("a¶↉🠁\n", 6, 0, .{ 6, 3, 3 });
try testIndexPosition("a¶↉🠁\n", 11, 1, .{ 0, 0, 0 });
}

test "offsets - tokenToLoc" {
try testTokenToLoc("foo", 0, 0, 3);
try testTokenToLoc("foo\n", 0, 0, 3);
try testTokenToLoc("\nfoo", 0, 1, 4);
try testTokenToLoc("foo:", 0, 0, 3);
try testTokenToLoc(";;", 1, 1, 2);
}

test "offsets - tokenIndexToLoc" {
try testTokenIndexToLoc("", 0, 0, 0);
try testTokenIndexToLoc("foo", 0, 0, 3);
try testTokenIndexToLoc("0, 0", 3, 3, 4);
try testTokenIndexToLoc(" bar ", 0, 1, 4);
}

test "offsets - lineLocAtIndex" {
try std.testing.expectEqualStrings("", offsets.lineSliceAtIndex("", 0));
try std.testing.expectEqualStrings("", offsets.lineSliceAtIndex("\n", 0));
try std.testing.expectEqualStrings("", offsets.lineSliceAtIndex("\n", 1));

try std.testing.expectEqualStrings("foo", offsets.lineSliceAtIndex("foo\nbar", 2));
try std.testing.expectEqualStrings("bar", offsets.lineSliceAtIndex("foo\nbar", 4));
try std.testing.expectEqualStrings("bar", offsets.lineSliceAtIndex("foo\nbar", 6));

try std.testing.expectEqualStrings("", offsets.lineSliceAtIndex("foo\n", 4));
try std.testing.expectEqualStrings("foo", offsets.lineSliceAtIndex("foo\n", 3));
}

test "offsets - lineLocUntilIndex" {
try std.testing.expectEqualStrings("", offsets.lineSliceUntilIndex("", 0));
try std.testing.expectEqualStrings("", offsets.lineSliceUntilIndex("\n", 0));
try std.testing.expectEqualStrings("", offsets.lineSliceUntilIndex("\n", 1));

try std.testing.expectEqualStrings("fo", offsets.lineSliceUntilIndex("foo\nbar", 2));
try std.testing.expectEqualStrings("", offsets.lineSliceUntilIndex("foo\nbar", 4));
try std.testing.expectEqualStrings("ba", offsets.lineSliceUntilIndex("foo\nbar", 6));

try std.testing.expectEqualStrings("", offsets.lineSliceUntilIndex("foo\n", 4));
try std.testing.expectEqualStrings("foo", offsets.lineSliceUntilIndex("foo\n", 3));
}

test "offsets - convertPositionEncoding" {
try testConvertPositionEncoding("", 0, 0, .{ 0, 0, 0 });
try testConvertPositionEncoding("\n", 0, 0, .{ 0, 0, 0 });
try testConvertPositionEncoding("\n", 1, 0, .{ 0, 0, 0 });
try testConvertPositionEncoding("foo", 0, 3, .{ 3, 3, 3 });
try testConvertPositionEncoding("a¶↉🠁", 0, 10, .{ 10, 5, 4 });
try testConvertPositionEncoding("a¶↉🠁\na¶↉🠁", 1, 6, .{ 6, 3, 3 });
}

test "offsets - advancePosition" {
try testAdvancePosition("", 0, 0, 0, 0, 0, 0);
try testAdvancePosition("foo", 0, 3, 0, 0, 0, 3);
try testAdvancePosition("\n", 1, 0, 0, 0, 0, 1);
try testAdvancePosition("foo\nbar", 1, 2, 0, 1, 1, 6);
try testAdvancePosition("foo\nbar", 1, 3, 1, 0, 4, 7);
}

test "offsets - countCodeUnits" {
try testCountCodeUnits("", .{ 0, 0, 0 });
try testCountCodeUnits("a\na", .{ 3, 3, 3 });
try testCountCodeUnits("a¶↉🠁", .{ 10, 5, 4 });
try testCountCodeUnits("🠁↉¶a", .{ 10, 5, 4 });
try testCountCodeUnits("🇺🇸 🇩🇪", .{ 17, 9, 5 });
}

test "offsets - getNCodeUnitByteCount" {
try testGetNCodeUnitByteCount("", .{ 0, 0, 0 });
try testGetNCodeUnitByteCount("foo", .{ 2, 2, 2 });
try testGetNCodeUnitByteCount("a¶🠁🠁", .{ 7, 4, 3 });
try testGetNCodeUnitByteCount("🇺🇸 🇩🇪", .{ 9, 5, 3 });
}

fn testIndexPosition(text: []const u8, index: usize, line: u32, characters: [3]u32) !void {
const position8: types.Position = .{ .line = line, .character = characters[0] };
|
||||||
|
const position16: types.Position = .{ .line = line, .character = characters[1] };
|
||||||
|
const position32: types.Position = .{ .line = line, .character = characters[2] };
|
||||||
|
|
||||||
|
try std.testing.expectEqual(position8, offsets.indexToPosition(text, index, .utf8));
|
||||||
|
try std.testing.expectEqual(position16, offsets.indexToPosition(text, index, .utf16));
|
||||||
|
try std.testing.expectEqual(position32, offsets.indexToPosition(text, index, .utf32));
|
||||||
|
|
||||||
|
try std.testing.expectEqual(index, offsets.positionToIndex(text, position8, .utf8));
|
||||||
|
try std.testing.expectEqual(index, offsets.positionToIndex(text, position16, .utf16));
|
||||||
|
try std.testing.expectEqual(index, offsets.positionToIndex(text, position32, .utf32));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testTokenToLoc(text: [:0]const u8, token_index: std.zig.Ast.TokenIndex, start: usize, end: usize) !void {
|
||||||
|
var tree = try std.zig.parse(std.testing.allocator, text);
|
||||||
|
defer tree.deinit(std.testing.allocator);
|
||||||
|
|
||||||
|
const actual = offsets.tokenToLoc(tree, token_index);
|
||||||
|
|
||||||
|
try std.testing.expectEqual(start, actual.start);
|
||||||
|
try std.testing.expectEqual(end, actual.end);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testTokenIndexToLoc(text: [:0]const u8, index: usize, start: usize, end: usize) !void {
|
||||||
|
const loc = offsets.tokenIndexToLoc(text, index);
|
||||||
|
|
||||||
|
try std.testing.expectEqual(start, loc.start);
|
||||||
|
try std.testing.expectEqual(end, loc.end);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testAdvancePosition(text: [:0]const u8, expected_line: u32, expected_character: u32, line: u32, character: u32, from: usize, to: usize) !void {
|
||||||
|
const expected: types.Position = .{ .line = expected_line, .character = expected_character };
|
||||||
|
const actual = offsets.advancePosition(text, .{ .line = line, .character = character }, from, to, .utf16);
|
||||||
|
|
||||||
|
try std.testing.expectEqual(expected, actual);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testConvertPositionEncoding(text: [:0]const u8, line: u32, character: u32, new_characters: [3]u32) !void {
|
||||||
|
const position: types.Position = .{ .line = line, .character = character };
|
||||||
|
|
||||||
|
const position8 = offsets.convertPositionEncoding(text, position, .utf8, .utf8);
|
||||||
|
const position16 = offsets.convertPositionEncoding(text, position, .utf8, .utf16);
|
||||||
|
const position32 = offsets.convertPositionEncoding(text, position, .utf8, .utf32);
|
||||||
|
|
||||||
|
try std.testing.expectEqual(line, position8.line);
|
||||||
|
try std.testing.expectEqual(line, position16.line);
|
||||||
|
try std.testing.expectEqual(line, position32.line);
|
||||||
|
|
||||||
|
try std.testing.expectEqual(new_characters[0], position8.character);
|
||||||
|
try std.testing.expectEqual(new_characters[1], position16.character);
|
||||||
|
try std.testing.expectEqual(new_characters[2], position32.character);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testCountCodeUnits(text: []const u8, counts: [3]usize) !void {
|
||||||
|
try std.testing.expectEqual(counts[0], offsets.countCodeUnits(text, .utf8));
|
||||||
|
try std.testing.expectEqual(counts[1], offsets.countCodeUnits(text, .utf16));
|
||||||
|
try std.testing.expectEqual(counts[2], offsets.countCodeUnits(text, .utf32));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testGetNCodeUnitByteCount(text: []const u8, n: [3]usize) !void {
|
||||||
|
try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[0], .utf8));
|
||||||
|
try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[1], .utf16));
|
||||||
|
try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[2], .utf32));
|
||||||
|
}
|
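The triples like .{ 10, 5, 4 } in the tests above are the same offset expressed in UTF-8, UTF-16, and UTF-32 code units. The arithmetic can be checked independently; the following test is illustrative only and not part of the commit, assuming only std.unicode's standard helpers.

const std = @import("std");

test "code unit counts for a¶↉🠁" {
    const text = "a¶↉🠁";
    // UTF-8: 'a' = 1 byte, '¶' = 2, '↉' = 3, '🠁' = 4 -> 10 code units.
    try std.testing.expectEqual(@as(usize, 10), text.len);
    // UTF-32: one code unit per codepoint -> 4.
    try std.testing.expectEqual(@as(usize, 4), try std.unicode.utf8CountCodepoints(text));
    // UTF-16: only '🠁' (U+1F801) lies above U+FFFF and needs a surrogate
    // pair, so 1 + 1 + 1 + 2 = 5 code units.
    var utf16_len: usize = 0;
    var it = (try std.unicode.Utf8View.init(text)).iterator();
    while (it.nextCodepoint()) |cp| {
        utf16_len += if (cp > 0xFFFF) @as(usize, 2) else 1;
    }
    try std.testing.expectEqual(@as(usize, 5), utf16_len);
}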
@@ -201,6 +201,18 @@ test "position context - label" {
    }
}

test "position context - empty" {
+    try testContext(
+        \\<cursor>
+    ,
+        .empty,
+        null,
+    );
+    try testContext(
+        \\<cursor>const foo = struct {};
+    ,
+        .empty,
+        null,
+    );
    try testContext(
        \\try foo(arg, slice[<cursor>]);
    ,
@@ -245,18 +257,14 @@ fn testContext(comptime line: []const u8, comptime tag: std.meta.Tag(analysis.Po
    const doc = try makeDocument("", line);
    defer freeDocument(doc);

-    var arena = std.heap.ArenaAllocator.init(allocator);
-    defer arena.deinit();
-
-    const p = try offsets.documentPosition(doc, .{ .line = 0, .character = @intCast(i64, cursor_idx) }, .utf8);
-    const ctx = try analysis.documentPositionContext(&arena, doc, p);
+    const ctx = try analysis.getPositionContext(allocator, doc, cursor_idx);

    if (std.meta.activeTag(ctx) != tag) {
        std.debug.print("Expected tag `{s}`, got `{s}`\n", .{ @tagName(tag), @tagName(std.meta.activeTag(ctx)) });
        return error.DifferentTag;
    }

-    const actual_range = ctx.range() orelse if(maybe_range) |expected_range| {
+    const actual_loc = ctx.loc() orelse if(maybe_range) |expected_range| {
        std.debug.print("Expected `{s}`, got null range\n", .{
            expected_range,
        });
@@ -265,7 +273,7 @@ fn testContext(comptime line: []const u8, comptime tag: std.meta.Tag(analysis.Po

    const expected_range = maybe_range orelse {
        std.debug.print("Expected null range, got `{s}`\n", .{
-            doc.text[actual_range.start..actual_range.end],
+            doc.text[actual_loc.start..actual_loc.end],
        });
        return error.DifferentRange;
    };
@@ -273,10 +281,10 @@ fn testContext(comptime line: []const u8, comptime tag: std.meta.Tag(analysis.Po
    const expected_range_start = comptime std.mem.indexOf(u8, final_line, expected_range).?;
    const expected_range_end = expected_range_start + expected_range.len;

-    if (expected_range_start != actual_range.start or expected_range_end != actual_range.end) {
+    if (expected_range_start != actual_loc.start or expected_range_end != actual_loc.end) {
        std.debug.print("Expected range `{s}` ({}..{}), got `{s}` ({}..{})\n", .{
            doc.text[expected_range_start..expected_range_end], expected_range_start, expected_range_end,
-            doc.text[actual_range.start..actual_range.end], actual_range.start, actual_range.end,
+            doc.text[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end,
        });
        return error.DifferentRange;
    }
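The rename from actual_range to actual_loc in these hunks tracks the overhauled API: ctx.loc() now returns a pair of byte indices into the source text rather than an LSP range. The sketch below shows the presumed shape of that type; this is an assumption for illustration, as the actual definition lives in the new offsets.zig (likely an alias of std.zig.Token.Loc, which has the same fields).

// Assumed shape of the Loc returned by ctx.loc(); illustrative only.
pub const Loc = struct {
    start: usize,
    end: usize,
};

// A Loc slices directly into the document text, as the debug prints above do:
fn locToSlice(text: []const u8, loc: Loc) []const u8 {
    return text[loc.start..loc.end];
}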