remove types.TextDocument (#693)

* remove types.TextDocument

* resolve compile errors because of previous merge
This commit is contained in:
Techatrix 2022-10-01 02:45:45 +02:00 committed by GitHub
parent a8fa5c68a7
commit a9a1fad13e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 122 additions and 215 deletions

View File

@@ -1,6 +1,7 @@
const std = @import("std");
const builtin = @import("builtin");
const types = @import("types.zig");
const requests = @import("requests.zig");
const URI = @import("uri.zig");
const analysis = @import("analysis.zig");
const offsets = @import("offsets.zig");
@@ -62,23 +63,20 @@ pub const BuildFileConfig = struct {
};
pub const Handle = struct {
document: types.TextDocument,
count: usize,
uri: []const u8,
text: [:0]const u8,
tree: Ast,
document_scope: analysis.DocumentScope,
/// Contains one entry for every import in the document
import_uris: []const []const u8,
/// Contains one entry for every cimport in the document
cimports: []CImportHandle,
/// Items in this array list come from `import_uris` and `cimports`
imports_used: std.ArrayListUnmanaged([]const u8),
tree: Ast,
document_scope: analysis.DocumentScope,
associated_build_file: ?*BuildFile,
is_build_file: ?*BuildFile,
pub fn uri(handle: Handle) []const u8 {
return handle.document.uri;
}
};
pub const UriToHandleMap = std.StringHashMapUnmanaged(*Handle);
@@ -462,9 +460,9 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
defer {
if (handle.associated_build_file) |build_file| {
log.debug("Opened document `{s}` with build file `{s}`", .{ handle.uri(), build_file.uri });
log.debug("Opened document `{s}` with build file `{s}`", .{ handle.uri, build_file.uri });
} else {
log.debug("Opened document `{s}` without a build file", .{handle.uri()});
log.debug("Opened document `{s}` without a build file", .{handle.uri});
}
}
@@ -476,17 +474,13 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
handle.* = Handle{
.count = 1,
.uri = uri,
.text = text,
.tree = tree,
.document_scope = document_scope,
.import_uris = &.{},
.cimports = &.{},
.imports_used = .{},
.document = .{
.uri = uri,
.text = text,
// Extra +1 to include the null terminator
.mem = text.ptr[0 .. text.len + 1],
},
.tree = tree,
.document_scope = document_scope,
.associated_build_file = null,
.is_build_file = null,
};
@@ -648,7 +642,7 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
}
handle.tree.deinit(self.allocator);
self.allocator.free(handle.document.mem);
self.allocator.free(handle.text);
for (handle.imports_used.items) |import_uri| {
self.decrementCount(import_uri);
@@ -819,9 +813,9 @@ fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) error{Ou
}
fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
log.debug("New text for document {s}", .{handle.uri()});
log.debug("New text for document {s}", .{handle.uri});
handle.tree.deinit(self.allocator);
handle.tree = try std.zig.parse(self.allocator, handle.document.text);
handle.tree = try std.zig.parse(self.allocator, handle.text);
handle.document_scope.deinit(self.allocator);
handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree);
@@ -946,70 +940,41 @@ pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
}
}
pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array, offset_encoding: offsets.Encoding) !void {
const document = &handle.document;
for (content_changes.items) |change| {
if (change.Object.get("range")) |range| {
std.debug.assert(@ptrCast([*]const u8, document.text.ptr) == document.mem.ptr);
// TODO: add tests and validate the JSON
const start_obj = range.Object.get("start").?.Object;
const start_pos = types.Position{
.line = @intCast(u32, start_obj.get("line").?.Integer),
.character = @intCast(u32, start_obj.get("character").?.Integer),
};
const end_obj = range.Object.get("end").?.Object;
const end_pos = types.Position{
.line = @intCast(u32, end_obj.get("line").?.Integer),
.character = @intCast(u32, end_obj.get("character").?.Integer),
};
const change_text = change.Object.get("text").?.String;
const start_index = offsets.positionToIndex(document.text, start_pos, offset_encoding);
const end_index = offsets.positionToIndex(document.text, end_pos, offset_encoding);
const old_len = document.text.len;
const new_len = old_len - (end_index - start_index) + change_text.len;
if (new_len >= document.mem.len) {
// We need to reallocate memory.
// We reallocate twice the current filesize or the new length, if it's more than that
// so that we can reduce the amount of realloc calls.
// We can tune this to find a better size if needed.
const realloc_len = std.math.max(2 * old_len, new_len + 1);
document.mem = try self.allocator.realloc(document.mem, realloc_len);
}
// The first part of the string, [0 .. start_index] need not be changed.
// We then copy the last part of the string, [end_index ..] to its
// new position, [start_index + change_len .. ]
if (new_len < old_len) {
std.mem.copy(u8, document.mem[start_index + change_text.len ..][0 .. old_len - end_index], document.mem[end_index..old_len]);
} else {
std.mem.copyBackwards(u8, document.mem[start_index + change_text.len ..][0 .. old_len - end_index], document.mem[end_index..old_len]);
}
// Finally, we copy the changes over.
std.mem.copy(u8, document.mem[start_index..][0..change_text.len], change_text);
// Reset the text substring.
document.mem[new_len] = 0;
document.text = document.mem[0..new_len :0];
} else {
const change_text = change.Object.get("text").?.String;
const old_len = document.text.len;
if (change_text.len >= document.mem.len) {
// Like above.
const realloc_len = std.math.max(2 * old_len, change_text.len + 1);
document.mem = try self.allocator.realloc(document.mem, realloc_len);
}
std.mem.copy(u8, document.mem[0..change_text.len], change_text);
document.mem[change_text.len] = 0;
document.text = document.mem[0..change_text.len :0];
pub fn applyChanges(
self: *DocumentStore,
handle: *Handle,
content_changes: []const requests.TextDocumentContentChangeEvent,
encoding: offsets.Encoding,
) !void {
var last_full_text_change: ?usize = null;
var i: usize = content_changes.len;
while (i > 0) {
i -= 1;
if (content_changes[i].range == null) {
last_full_text_change = i;
}
}
var text_array = std.ArrayListUnmanaged(u8){};
errdefer text_array.deinit(self.allocator);
try text_array.appendSlice(self.allocator, if (last_full_text_change) |index| content_changes[index].text else handle.text);
// don't even bother applying changes before a full text change
const changes = content_changes[if (last_full_text_change) |index| index + 1 else 0..];
for (changes) |item| {
const range = item.range.?; // every element is guaranteed to have `range` set
const text = item.text;
const loc = offsets.rangeToLoc(text_array.items, range, encoding);
try text_array.replaceRange(self.allocator, loc.start, loc.end - loc.start, text);
}
const new_text = try text_array.toOwnedSliceSentinel(self.allocator, 0);
self.allocator.free(handle.text);
handle.text = new_text;
try self.refreshDocument(handle);
}
@@ -1039,7 +1004,7 @@ pub fn uriFromImportStr(self: *DocumentStore, allocator: std.mem.Allocator, hand
}
return null;
} else {
const base = handle.uri();
const base = handle.uri;
var base_len = base.len;
while (base[base_len - 1] != '/' and base_len > 0) {
base_len -= 1;
@@ -1188,7 +1153,7 @@ pub fn deinit(self: *DocumentStore) void {
while (entry_iterator.next()) |entry| {
entry.value_ptr.*.document_scope.deinit(self.allocator);
entry.value_ptr.*.tree.deinit(self.allocator);
self.allocator.free(entry.value_ptr.*.document.mem);
self.allocator.free(entry.value_ptr.*.text);
for (entry.value_ptr.*.import_uris) |uri| {
self.allocator.free(uri);
}

View File

@@ -274,7 +274,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: *DocumentStore.H
.method = "textDocument/publishDiagnostics",
.params = .{
.PublishDiagnostics = .{
.uri = handle.uri(),
.uri = handle.uri,
.diagnostics = diagnostics.items,
},
},
@@ -298,7 +298,7 @@ fn getAstCheckDiagnostics(
log.warn("Failed to spawn zig ast-check process, error: {}", .{err});
return;
};
try process.stdin.?.writeAll(handle.document.text);
try process.stdin.?.writeAll(handle.text);
process.stdin.?.close();
process.stdin = null;
@@ -329,8 +329,8 @@ fn getAstCheckDiagnostics(
};
// zig uses utf-8 encoding for character offsets
const position = offsets.convertPositionEncoding(handle.document.text, utf8_position, .utf8, server.offset_encoding);
const range = offsets.tokenPositionToRange(handle.document.text, position, server.offset_encoding);
const position = offsets.convertPositionEncoding(handle.text, utf8_position, .utf8, server.offset_encoding);
const range = offsets.tokenPositionToRange(handle.text, position, server.offset_encoding);
const msg = pos_and_diag_iterator.rest()[1..];
@@ -351,7 +351,7 @@ fn getAstCheckDiagnostics(
try server.arena.allocator().alloc(types.DiagnosticRelatedInformation, 1);
const location = types.Location{
.uri = handle.uri(),
.uri = handle.uri,
.range = range,
};
@@ -645,22 +645,20 @@ fn nodeToCompletion(
}
pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []const u8 {
const text = handle.document.text;
if (pos_index + 1 >= text.len) return "";
if (pos_index + 1 >= handle.text.len) return "";
var start_idx = pos_index;
while (start_idx > 0 and isSymbolChar(text[start_idx - 1])) {
while (start_idx > 0 and isSymbolChar(handle.text[start_idx - 1])) {
start_idx -= 1;
}
var end_idx = pos_index;
while (end_idx < handle.document.text.len and isSymbolChar(text[end_idx])) {
while (end_idx < handle.text.len and isSymbolChar(handle.text[end_idx])) {
end_idx += 1;
}
if (end_idx <= start_idx) return "";
return text[start_idx..end_idx];
return handle.text[start_idx..end_idx];
}
fn isSymbolChar(char: u8) bool {
@@ -693,7 +691,7 @@ fn gotoDefinitionSymbol(
};
return types.Location{
.uri = handle.document.uri,
.uri = handle.uri,
.range = offsets.tokenToRange(handle.tree, name_token, server.offset_encoding),
};
}
@@ -912,12 +910,10 @@ fn getSymbolFieldAccess(
const name = identifierFromPosition(source_index, handle.*);
if (name.len == 0) return null;
var held_range = handle.document.borrowNullTerminatedSlice(loc.start, loc.end);
var tokenizer = std.zig.Tokenizer.init(held_range.data());
var held_range = try server.arena.allocator().dupeZ(u8, offsets.locToSlice(handle.text, loc));
var tokenizer = std.zig.Tokenizer.init(held_range);
errdefer held_range.release();
if (try analysis.getFieldAccessType(&server.document_store, &server.arena, handle, source_index, &tokenizer)) |result| {
held_range.release();
const container_handle = result.unwrapped orelse result.original;
const container_handle_node = switch (container_handle.type.data) {
.other => |n| n,
@@ -1144,21 +1140,22 @@ fn completeFieldAccess(server: *Server, handle: *DocumentStore.Handle, source_in
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
const allocator = server.arena.allocator();
var completions = std.ArrayListUnmanaged(types.CompletionItem){};
var held_range = handle.document.borrowNullTerminatedSlice(loc.start, loc.end);
defer held_range.release();
var tokenizer = std.zig.Tokenizer.init(held_range.data());
var held_loc = try allocator.dupeZ(u8, offsets.locToSlice(handle.text, loc));
var tokenizer = std.zig.Tokenizer.init(held_loc);
const result = (try analysis.getFieldAccessType(&server.document_store, &server.arena, handle, source_index, &tokenizer)) orelse return null;
try server.typeToCompletion(&completions, result, handle);
if (server.client_capabilities.label_details_support) {
for (completions.items) |*item| {
try formatDetailledLabel(item, server.arena.allocator());
try formatDetailledLabel(item, allocator);
}
}
return completions.toOwnedSlice(server.arena.allocator());
return completions.toOwnedSlice(allocator);
}
fn formatDetailledLabel(item: *types.CompletionItem, alloc: std.mem.Allocator) !void {
@@ -1375,7 +1372,7 @@ fn completeFileSystemStringLiteral(allocator: std.mem.Allocator, handle: *Docume
var completions = std.ArrayListUnmanaged(types.CompletionItem){};
fsc: {
var document_path = try uri_utils.parse(allocator, handle.uri());
var document_path = try uri_utils.parse(allocator, handle.uri);
var document_dir_path = std.fs.openIterableDirAbsolute(std.fs.path.dirname(document_path) orelse break :fsc, .{}) catch break :fsc;
defer document_dir_path.close();
@@ -1712,7 +1709,7 @@ fn changeDocumentHandler(server: *Server, writer: anytype, id: types.RequestId,
return;
};
try server.document_store.applyChanges(handle, req.params.contentChanges.Array, server.offset_encoding);
try server.document_store.applyChanges(handle, req.params.contentChanges, server.offset_encoding);
try server.publishDiagnostics(writer, handle);
}
@@ -1826,8 +1823,8 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req:
});
}
const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);
const source_index = offsets.positionToIndex(handle.text, req.params.position, server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
const maybe_completions = switch (pos_context) {
.builtin => server.builtin_completions.items,
@@ -1888,7 +1885,7 @@ fn signatureHelpHandler(server: *Server, writer: anytype, id: types.RequestId, r
if (req.params.position.character == 0)
return try respondGeneric(writer, id, no_signatures_response);
const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
const source_index = offsets.positionToIndex(handle.text, req.params.position, server.offset_encoding);
if (try getSignatureInfo(
&server.document_store,
&server.arena,
@@ -1921,8 +1918,8 @@ fn gotoHandler(server: *Server, writer: anytype, id: types.RequestId, req: reque
if (req.params.position.character == 0) return try respondGeneric(writer, id, null_result_response);
const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);
const source_index = offsets.positionToIndex(handle.text, req.params.position, server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
const maybe_location = switch (pos_context) {
.var_access => try server.gotoDefinitionGlobal(source_index, handle, resolve_alias),
@@ -1965,8 +1962,8 @@ fn hoverHandler(server: *Server, writer: anytype, id: types.RequestId, req: requ
if (req.params.position.character == 0) return try respondGeneric(writer, id, null_result_response);
const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);
const source_index = offsets.positionToIndex(handle.text, req.params.position, server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
const maybe_hover = switch (pos_context) {
.builtin => try server.hoverDefinitionBuiltin(source_index, handle),
@@ -2013,7 +2010,7 @@ fn formattingHandler(server: *Server, writer: anytype, id: types.RequestId, req:
log.warn("Failed to spawn zig fmt process, error: {}", .{err});
return try respondGeneric(writer, id, null_result_response);
};
try process.stdin.?.writeAll(handle.document.text);
try process.stdin.?.writeAll(handle.text);
process.stdin.?.close();
process.stdin = null;
@@ -2022,10 +2019,10 @@ fn formattingHandler(server: *Server, writer: anytype, id: types.RequestId, req:
switch (try process.wait()) {
.Exited => |code| if (code == 0) {
if (std.mem.eql(u8, handle.document.text, stdout_bytes)) return try respondGeneric(writer, id, null_result_response);
if (std.mem.eql(u8, handle.text, stdout_bytes)) return try respondGeneric(writer, id, null_result_response);
var edits = diff.edits(server.arena.allocator(), handle.document.text, stdout_bytes) catch {
const range = offsets.locToRange(handle.document.text, .{ .start = 0, .end = handle.document.text.len }, server.offset_encoding);
var edits = diff.edits(server.arena.allocator(), handle.text, stdout_bytes) catch {
const range = offsets.locToRange(handle.text, .{ .start = 0, .end = handle.text.len }, server.offset_encoding);
// If there was an error trying to diff the text, return the formatted response
// as the new text for the entire range of the document
return try send(writer, server.arena.allocator(), types.Response{
@@ -2148,8 +2145,8 @@ fn generalReferencesHandler(server: *Server, writer: anytype, id: types.RequestI
if (req.position().character <= 0) return try respondGeneric(writer, id, null_result_response);
const source_index = offsets.positionToIndex(handle.document.text, req.position(), server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);
const source_index = offsets.positionToIndex(handle.text, req.position(), server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
const decl = switch (pos_context) {
.var_access => try server.getSymbolGlobal(source_index, handle),
@@ -2191,7 +2188,7 @@ fn generalReferencesHandler(server: *Server, writer: anytype, id: types.RequestI
.references => .{ .Locations = locations.items },
.highlight => blk: {
var highlights = try std.ArrayListUnmanaged(types.DocumentHighlight).initCapacity(allocator, locations.items.len);
const uri = handle.uri();
const uri = handle.uri;
for (locations.items) |loc| {
if (!std.mem.eql(u8, loc.uri, uri)) continue;
highlights.appendAssumeCapacity(.{

View File

@@ -1454,18 +1454,19 @@ fn tokenLocAppend(prev: offsets.Loc, token: std.zig.Token) offsets.Loc {
};
}
pub fn getPositionContext(allocator: std.mem.Allocator, document: types.TextDocument, doc_index: usize) !PositionContext {
const line_loc = offsets.lineLocUntilIndex(document.text, doc_index);
const line = offsets.locToSlice(document.text, line_loc);
pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_index: usize) !PositionContext {
const line_loc = offsets.lineLocUntilIndex(text, doc_index);
const line = offsets.locToSlice(text, line_loc);
var stack = try std.ArrayListUnmanaged(StackState).initCapacity(allocator, 8);
defer stack.deinit(allocator);
{
var held_line = document.borrowNullTerminatedSlice(0, line_loc.end);
defer held_line.release();
var held_line = try allocator.dupeZ(u8, text[0..line_loc.end]);
defer allocator.free(held_line);
var tokenizer: std.zig.Tokenizer = .{
.buffer = held_line.data(),
.buffer = held_line,
.index = line_loc.start,
.pending_invalid_token = null,
};
@@ -1592,14 +1593,14 @@ pub fn getPositionContext(allocator: std.mem.Allocator, document: types.TextDocu
if (line.len == 0) return .empty;
var held_line = document.borrowNullTerminatedSlice(line_loc.start, line_loc.end);
defer held_line.release();
var held_line = try allocator.dupeZ(u8, offsets.locToSlice(text, line_loc));
defer allocator.free(held_line);
switch (line[0]) {
'a'...'z', 'A'...'Z', '_', '@' => {},
else => break :block .empty,
}
var tokenizer = std.zig.Tokenizer.init(held_line.data());
var tokenizer = std.zig.Tokenizer.init(held_line);
const tok = tokenizer.next();
if (tok.tag == .identifier) {
break :block PositionContext{ .var_access = tok.loc };

View File

@@ -22,7 +22,7 @@ pub const Builder = struct {
) error{OutOfMemory}!void {
const kind = DiagnosticKind.parse(diagnostic.message) orelse return;
const loc = offsets.rangeToLoc(builder.text(), diagnostic.range, builder.offset_encoding);
const loc = offsets.rangeToLoc(builder.handle.text, diagnostic.range, builder.offset_encoding);
switch (kind) {
.unused => |id| switch (id) {
@@ -49,12 +49,12 @@ pub const Builder = struct {
}
pub fn createTextEditLoc(self: *Builder, loc: offsets.Loc, new_text: []const u8) types.TextEdit {
const range = offsets.locToRange(self.text(), loc, self.offset_encoding);
const range = offsets.locToRange(self.handle.text, loc, self.offset_encoding);
return types.TextEdit{ .range = range, .newText = new_text };
}
pub fn createTextEditPos(self: *Builder, index: usize, new_text: []const u8) types.TextEdit {
const position = offsets.indexToPosition(self.text(), index, self.offset_encoding);
const position = offsets.indexToPosition(self.handle.text, index, self.offset_encoding);
return types.TextEdit{ .range = .{ .start = position, .end = position }, .newText = new_text };
}
@@ -63,18 +63,14 @@ pub const Builder = struct {
try text_edits.appendSlice(self.arena.allocator(), edits);
var workspace_edit = types.WorkspaceEdit{ .changes = .{} };
try workspace_edit.changes.putNoClobber(self.arena.allocator(), self.handle.uri(), text_edits);
try workspace_edit.changes.putNoClobber(self.arena.allocator(), self.handle.uri, text_edits);
return workspace_edit;
}
fn text(self: *Builder) []const u8 {
return self.handle.document.text;
}
};
fn handleNonCamelcaseFunction(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const identifier_name = offsets.locToSlice(builder.text(), loc);
const identifier_name = offsets.locToSlice(builder.handle.text, loc);
if (std.mem.allEqual(u8, identifier_name, '_')) return;
@@ -91,7 +87,7 @@ fn handleNonCamelcaseFunction(builder: *Builder, actions: *std.ArrayListUnmanage
}
fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const identifier_name = offsets.locToSlice(builder.text(), loc);
const identifier_name = offsets.locToSlice(builder.handle.text, loc);
const tree = builder.handle.tree;
const node_tags = tree.nodes.items(.tag);
@@ -117,7 +113,7 @@ fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnman
const block = node_datas[payload.func].rhs;
const indent = offsets.lineSliceUntilIndex(builder.text(), token_starts[node_tokens[payload.func]]).len;
const indent = offsets.lineSliceUntilIndex(builder.handle.text, token_starts[node_tokens[payload.func]]).len;
const new_text = try createDiscardText(builder.arena.allocator(), identifier_name, indent + 4);
const index = token_starts[node_tokens[block]] + 1;
@@ -141,7 +137,7 @@ fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnman
}
fn handleUnusedVariableOrConstant(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const identifier_name = offsets.locToSlice(builder.text(), loc);
const identifier_name = offsets.locToSlice(builder.handle.text, loc);
const tree = builder.handle.tree;
const token_tags = tree.tokens.items(.tag);
@@ -163,7 +159,7 @@ fn handleUnusedVariableOrConstant(builder: *Builder, actions: *std.ArrayListUnma
const first_token = tree.firstToken(node);
const last_token = ast.lastToken(tree, node) + 1;
const indent = offsets.lineSliceUntilIndex(builder.text(), token_starts[first_token]).len;
const indent = offsets.lineSliceUntilIndex(builder.handle.text, token_starts[first_token]).len;
if (token_tags[last_token] != .semicolon) return;
@@ -180,11 +176,11 @@ fn handleUnusedVariableOrConstant(builder: *Builder, actions: *std.ArrayListUnma
}
fn handleUnusedIndexCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const capture_locs = getCaptureLoc(builder.text(), loc, true) orelse return;
const capture_locs = getCaptureLoc(builder.handle.text, loc, true) orelse return;
// TODO support discarding without modifying the capture
// by adding a discard in the block scope
const is_value_discarded = std.mem.eql(u8, offsets.locToSlice(builder.text(), capture_locs.value), "_");
const is_value_discarded = std.mem.eql(u8, offsets.locToSlice(builder.handle.text, capture_locs.value), "_");
if (is_value_discarded) {
// |_, i| ->
// TODO fix formatting
@@ -210,7 +206,7 @@ fn handleUnusedIndexCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(
}
fn handleUnusedCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const capture_locs = getCaptureLoc(builder.text(), loc, false) orelse return;
const capture_locs = getCaptureLoc(builder.handle.text, loc, false) orelse return;
// TODO support discarding without modifying the capture
// by adding a discard in the block scope
@@ -235,7 +231,7 @@ fn handleUnusedCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types
}
fn handlePointlessDiscard(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const edit_loc = getDiscardLoc(builder.text(), loc) orelse return;
const edit_loc = getDiscardLoc(builder.handle.text, loc) orelse return;
try actions.append(builder.arena.allocator(), .{
.title = "remove pointless discard",

View File

@@ -212,14 +212,12 @@ fn writeCallNodeHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
const start = offsets.tokenToIndex(tree, lhsToken);
const rhs_loc = offsets.tokenToLoc(tree, rhsToken);
var held_range = handle.document.borrowNullTerminatedSlice(start, rhs_loc.end);
var tokenizer = std.zig.Tokenizer.init(held_range.data());
var held_range = try arena.allocator().dupeZ(u8, handle.text[start..rhs_loc.end]);
var tokenizer = std.zig.Tokenizer.init(held_range);
// note: we have the ast node, traversing it would probably yield better results
// than trying to re-tokenize and re-parse it
errdefer held_range.release();
if (try analysis.getFieldAccessType(store, arena, handle, rhs_loc.end, &tokenizer)) |result| {
held_range.release();
const container_handle = result.unwrapped orelse result.original;
switch (container_handle.type.data) {
.other => |container_handle_node| {

View File

@@ -29,7 +29,7 @@ pub fn labelReferences(
if (include_decl) {
// The first token is always going to be the label
try locations.append(allocator, .{
.uri = handle.uri(),
.uri = handle.uri,
.range = offsets.tokenToRange(handle.tree, first_tok, encoding),
});
}
@@ -45,7 +45,7 @@ pub fn labelReferences(
if (!std.mem.eql(u8, tree.tokenSlice(curr_tok + 2), tree.tokenSlice(first_tok))) continue;
try locations.append(allocator, .{
.uri = handle.uri(),
.uri = handle.uri,
.range = offsets.tokenToRange(handle.tree, curr_tok + 2, encoding),
});
}
@@ -72,7 +72,7 @@ const Builder = struct {
pub fn add(self: *Builder, handle: *DocumentStore.Handle, token_index: Ast.TokenIndex) !void {
try self.locations.append(self.arena.allocator(), .{
.uri = handle.uri(),
.uri = handle.uri,
.range = offsets.tokenToRange(handle.tree, token_index, self.encoding),
});
}

View File

@@ -204,10 +204,15 @@ const TextDocumentIdentifier = struct {
pub const ChangeDocument = struct {
params: struct {
textDocument: TextDocumentIdentifier,
contentChanges: std.json.Value,
contentChanges: []TextDocumentContentChangeEvent,
},
};
pub const TextDocumentContentChangeEvent = struct {
range: ?types.Range,
text: []const u8,
};
const TextDocumentIdentifierRequest = struct {
params: struct {
textDocument: TextDocumentIdentifier,

View File

@@ -251,11 +251,10 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl
const last_token_slice = tree.tokenSlice(expr_last_token);
const expr_end = token_starts[expr_last_token] + last_token_slice.len;
var held_expr = handle.document.borrowNullTerminatedSlice(expr_start, expr_end);
defer held_expr.release();
var held_expr = try alloc.dupeZ(u8, handle.text[expr_start..expr_end]);
// Resolve the expression.
var tokenizer = std.zig.Tokenizer.init(held_expr.data());
var tokenizer = std.zig.Tokenizer.init(held_expr);
if (try analysis.getFieldAccessType(
document_store,
arena,

View File

@@ -141,41 +141,6 @@ pub const Diagnostic = struct {
relatedInformation: ?[]DiagnosticRelatedInformation = null,
};
pub const TextDocument = struct {
uri: string,
// This is a substring of mem starting at 0
text: [:0]const u8,
// This holds the memory that we have actually allocated.
mem: []u8,
const Held = struct {
document: *const TextDocument,
popped: u8,
start_index: usize,
end_index: usize,
pub fn data(self: @This()) [:0]const u8 {
return self.document.mem[self.start_index..self.end_index :0];
}
pub fn release(self: *@This()) void {
self.document.mem[self.end_index] = self.popped;
}
};
pub fn borrowNullTerminatedSlice(self: *const @This(), start_idx: usize, end_idx: usize) Held {
std.debug.assert(end_idx >= start_idx);
const popped_char = self.mem[end_idx];
self.mem[end_idx] = 0;
return .{
.document = self,
.popped = popped_char,
.start_index = start_idx,
.end_index = end_idx,
};
}
};
pub const WorkspaceEdit = struct {
changes: std.StringHashMapUnmanaged(std.ArrayListUnmanaged(TextEdit)),

View File

@@ -233,31 +233,12 @@ test "position context - empty" {
);
}
fn makeDocument(uri: []const u8, text: []const u8) !types.TextDocument {
const mem = try allocator.alloc(u8, text.len + 1);
std.mem.copy(u8, mem, text);
mem[text.len] = 0;
return types.TextDocument{
.uri = uri,
.mem = mem,
.text = mem[0..text.len :0],
};
}
fn freeDocument(doc: types.TextDocument) void {
allocator.free(doc.text);
}
fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), maybe_range: ?[]const u8) !void {
const cursor_idx = std.mem.indexOf(u8, line, "<cursor>").?;
const final_line = try std.mem.concat(allocator, u8, &.{ line[0..cursor_idx], line[cursor_idx + "<cursor>".len ..] });
defer allocator.free(final_line);
const doc = try makeDocument("", line);
defer freeDocument(doc);
const ctx = try analysis.getPositionContext(allocator, doc, cursor_idx);
const ctx = try analysis.getPositionContext(allocator, line, cursor_idx);
if (std.meta.activeTag(ctx) != tag) {
std.debug.print("Expected tag `{s}`, got `{s}`\n", .{ @tagName(tag), @tagName(std.meta.activeTag(ctx)) });
@@ -273,7 +254,7 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma
const expected_range = maybe_range orelse {
std.debug.print("Expected null range, got `{s}`\n", .{
doc.text[actual_loc.start..actual_loc.end],
line[actual_loc.start..actual_loc.end],
});
return error.DifferentRange;
};
@@ -283,8 +264,8 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma
if (expected_range_start != actual_loc.start or expected_range_end != actual_loc.end) {
std.debug.print("Expected range `{s}` ({}..{}), got `{s}` ({}..{})\n", .{
doc.text[expected_range_start..expected_range_end], expected_range_start, expected_range_end,
doc.text[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end,
line[expected_range_start..expected_range_end], expected_range_start, expected_range_end,
line[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end,
});
return error.DifferentRange;
}