Fix build for zig master by inserting null terminators when tokenizing and resetting them when needed
commit bba069526c
parent d38d67ab7d
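The gist of the change, as a standalone sketch: std.zig.Tokenizer on zig master expects a null-terminated ([:0]const u8) source, so rather than duplicating the text, the commit temporarily writes a 0 just past the range being tokenized and restores the original byte afterwards. The `Held`/`borrow` names below mirror the `TextDocument.Held` / `borrowNullTerminatedSlice` API added in this commit, but the buffer contents and indices are made up for illustration.

    const std = @import("std");

    // Minimal standalone sketch (assumed simplification of the committed API):
    // write a temporary 0 just past the range of interest so it can be viewed
    // as a sentinel-terminated slice, then restore the original byte when done.
    const Held = struct {
        mem: []u8,
        popped: u8,
        start: usize,
        end: usize,

        fn data(self: Held) [:0]const u8 {
            // Valid because borrow() stored a 0 at self.end.
            return self.mem[self.start..self.end :0];
        }

        fn release(self: *Held) void {
            // Put back whatever byte the temporary terminator replaced.
            self.mem[self.end] = self.popped;
        }
    };

    fn borrow(mem: []u8, start: usize, end: usize) Held {
        std.debug.assert(start <= end and end < mem.len);
        const popped = mem[end];
        mem[end] = 0; // insert the temporary null terminator
        return .{ .mem = mem, .popped = popped, .start = start, .end = end };
    }

    pub fn main() void {
        var buf = "const x = foo.bar;".*; // mutable copy of the source text
        var held = borrow(buf[0..], 10, 13); // view "foo" as a [:0]const u8
        defer held.release(); // restore the '.' that was overwritten

        // held.data() could now be handed to std.zig.Tokenizer.init on zig master.
        std.debug.print("borrowed: '{s}'\n", .{held.data()});
    }

The upside over copying the range is that no allocation is needed; the cost is that the document buffer is briefly modified in place, which is why every call site in the diff pairs the borrow with a release (via defer/errdefer) before the overwritten byte can be read again.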
@@ -1525,8 +1525,16 @@ pub fn documentPositionContext(
     _ = document;

     const line = doc_position.line;
-    var tokenizer = std.zig.Tokenizer.init(line[0..doc_position.line_index]);
+    const line_mem_start = @ptrToInt(line.ptr) - @ptrToInt(document.mem.ptr);
     var stack = try std.ArrayList(StackState).initCapacity(&arena.allocator, 8);
+    {
+        var held_line = document.borrowNullTerminatedSlice(
+            line_mem_start,
+            line_mem_start + doc_position.line_index,
+        );
+        defer held_line.release();
+        var tokenizer = std.zig.Tokenizer.init(held_line.data());
+
     while (true) {
         const tok = tokenizer.next();
@@ -1611,6 +1619,7 @@ pub fn documentPositionContext(
             else => {},
         }
     }
+    }

     return block: {
         if (stack.popOrNull()) |state| {
@@ -1628,11 +1637,17 @@ pub fn documentPositionContext(
            }
        }
        if (doc_position.line_index < line.len) {
+            var held_line = document.borrowNullTerminatedSlice(
+                line_mem_start + doc_position.line_index,
+                line_mem_start + line.len,
+            );
+            defer held_line.release();
+
            switch (line[doc_position.line_index]) {
                'a'...'z', 'A'...'Z', '_', '@' => {},
                else => break :block .empty,
            }
-            tokenizer = std.zig.Tokenizer.init(line[doc_position.line_index..]);
+            var tokenizer = std.zig.Tokenizer.init(held_line.data());
            const tok = tokenizer.next();
            if (tok.tag == .identifier)
                break :block PositionContext{ .var_access = tok.loc };
@@ -177,7 +177,7 @@ fn loadPackages(context: LoadPackagesContext) !void {

 /// This function asserts the document is not open yet and takes ownership
 /// of the uri and text passed in.
-fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Handle {
+fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Handle {
     log.debug("Opened document: {s}", .{uri});

     var handle = try self.allocator.create(Handle);
@@ -297,7 +297,13 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
     }

     // Read the build file, create a new document, set the candidate to the new build file.
-    const build_file_text = try build_file.readToEndAlloc(self.allocator, std.math.maxInt(usize));
+    const build_file_text = try build_file.readToEndAllocOptions(
+        self.allocator,
+        std.math.maxInt(usize),
+        null,
+        @alignOf(u8),
+        0,
+    );
     errdefer self.allocator.free(build_file_text);

     const build_file_handle = try self.newDocument(build_file_uri, build_file_text);
@@ -336,7 +342,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H
         return entry.value_ptr.*;
     }

-    const duped_text = try std.mem.dupe(self.allocator, u8, text);
+    const duped_text = try std.mem.dupeZ(self.allocator, u8, text);
     errdefer self.allocator.free(duped_text);
     const duped_uri = try std.mem.dupe(self.allocator, u8, uri);
     errdefer self.allocator.free(duped_uri);
@@ -505,7 +511,7 @@ pub fn applyChanges(

     for (content_changes.items) |change| {
         if (change.Object.get("range")) |range| {
-            std.debug.assert(document.text.ptr == document.mem.ptr);
+            std.debug.assert(@ptrCast([*]const u8, document.text.ptr) == document.mem.ptr);

             // TODO: add tests and validate the JSON
             const start_obj = range.Object.get("start").?.Object;
@@ -525,12 +531,12 @@ pub fn applyChanges(

             const old_len = document.text.len;
             const new_len = old_len - (end_index - start_index) + change_text.len;
-            if (new_len > document.mem.len) {
+            if (new_len >= document.mem.len) {
                 // We need to reallocate memory.
                 // We reallocate twice the current filesize or the new length, if it's more than that
                 // so that we can reduce the amount of realloc calls.
                 // We can tune this to find a better size if needed.
-                const realloc_len = std.math.max(2 * old_len, new_len);
+                const realloc_len = std.math.max(2 * old_len, new_len + 1);
                 document.mem = try self.allocator.realloc(document.mem, realloc_len);
             }

@@ -546,19 +552,21 @@ pub fn applyChanges(
             std.mem.copy(u8, document.mem[start_index..][0..change_text.len], change_text);

             // Reset the text substring.
-            document.text = document.mem[0..new_len];
+            document.mem[new_len] = 0;
+            document.text = document.mem[0..new_len :0];
         } else {
             const change_text = change.Object.get("text").?.String;
             const old_len = document.text.len;

-            if (change_text.len > document.mem.len) {
+            if (change_text.len >= document.mem.len) {
                 // Like above.
-                const realloc_len = std.math.max(2 * old_len, change_text.len);
+                const realloc_len = std.math.max(2 * old_len, change_text.len + 1);
                 document.mem = try self.allocator.realloc(document.mem, realloc_len);
             }

             std.mem.copy(u8, document.mem[0..change_text.len], change_text);
-            document.text = document.mem[0..change_text.len];
+            document.mem[change_text.len] = 0;
+            document.text = document.mem[0..change_text.len :0];
         }
     }

@@ -658,7 +666,13 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const

     defer file.close();
     {
-        const file_contents = file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| switch (err) {
+        const file_contents = file.readToEndAllocOptions(
+            allocator,
+            std.math.maxInt(usize),
+            null,
+            @alignOf(u8),
+            0,
+        ) catch |err| switch (err) {
             error.OutOfMemory => return error.OutOfMemory,
             else => {
                 log.debug("Could not read from file {s}", .{file_path});
src/main.zig (19 lines changed)
@@ -774,9 +774,14 @@ fn getSymbolFieldAccess(

     const name = identifierFromPosition(position.absolute_index, handle.*);
     if (name.len == 0) return null;
-    var tokenizer = std.zig.Tokenizer.init(position.line[range.start..range.end]);

+    const line_mem_start = @ptrToInt(position.line.ptr) - @ptrToInt(handle.document.mem.ptr);
+    var held_range = handle.document.borrowNullTerminatedSlice(line_mem_start + range.start, line_mem_start + range.end);
+    var tokenizer = std.zig.Tokenizer.init(held_range.data());
+
+    errdefer held_range.release();
     if (try analysis.getFieldAccessType(&document_store, arena, handle, position.absolute_index, &tokenizer)) |result| {
+        held_range.release();
         const container_handle = result.unwrapped orelse result.original;
         const container_handle_node = switch (container_handle.type.data) {
             .other => |n| n,
@@ -1147,8 +1152,14 @@ fn completeFieldAccess(
     config: Config,
 ) !void {
     var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);
-    var tokenizer = std.zig.Tokenizer.init(position.line[range.start..range.end]);
+
+    const line_mem_start = @ptrToInt(position.line.ptr) - @ptrToInt(handle.document.mem.ptr);
+    var held_range = handle.document.borrowNullTerminatedSlice(line_mem_start + range.start, line_mem_start + range.end);
+    errdefer held_range.release();
+    var tokenizer = std.zig.Tokenizer.init(held_range.data());
+
     if (try analysis.getFieldAccessType(&document_store, arena, handle, position.absolute_index, &tokenizer)) |result| {
+        held_range.release();
         try typeToCompletion(arena, &completions, result, handle, config);
         truncateCompletions(completions.items, config.max_detail_length);
     }
@@ -1759,7 +1770,9 @@ var gpa_state = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = stack_
 pub fn main() anyerror!void {
     defer _ = gpa_state.deinit();
     defer keep_running = false;
-    allocator = &gpa_state.allocator;
+    // allocator = &gpa_state.allocator;
+    // @TODO Using the GPA here, realloc calls hang currently for some reason
+    allocator = std.heap.page_allocator;

     analysis.init(allocator);
     defer analysis.deinit();
@@ -264,9 +264,12 @@ pub fn getSignatureInfo(
            const expr_start = token_starts[expr_first_token];
            const last_token_slice = tree.tokenSlice(expr_last_token);
            const expr_end = token_starts[expr_last_token] + last_token_slice.len;
-            const expr_source = tree.source[expr_start..expr_end];
+
+            var held_expr = handle.document.borrowNullTerminatedSlice(expr_start, expr_end);
+            defer held_expr.release();
+
            // Resolve the expression.
-            var tokenizer = std.zig.Tokenizer.init(expr_source);
+            var tokenizer = std.zig.Tokenizer.init(held_expr.data());
            if (try analysis.getFieldAccessType(
                document_store,
                arena,
@@ -114,9 +114,36 @@ pub const Diagnostic = struct {
 pub const TextDocument = struct {
     uri: []const u8,
     // This is a substring of mem starting at 0
-    text: []const u8,
+    text: [:0]const u8,
     // This holds the memory that we have actually allocated.
     mem: []u8,
+
+    const Held = struct {
+        document: *const TextDocument,
+        popped: u8,
+        start_index: usize,
+        end_index: usize,
+
+        pub fn data(self: @This()) [:0]const u8 {
+            return self.document.mem[self.start_index..self.end_index :0];
+        }
+
+        pub fn release(self: *@This()) void {
+            self.document.mem[self.end_index] = self.popped;
+        }
+    };
+
+    pub fn borrowNullTerminatedSlice(self: *const @This(), start_idx: usize, end_idx: usize) Held {
+        std.debug.assert(end_idx >= start_idx and end_idx < self.text.len);
+        const popped_char = self.mem[end_idx];
+        self.mem[end_idx] = 0;
+        return .{
+            .document = self,
+            .popped = popped_char,
+            .start_index = start_idx,
+            .end_index = end_idx,
+        };
+    }
 };

 pub const WorkspaceEdit = struct {
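For reference, a small illustrative example of the index translation the src/main.zig call sites perform before borrowing: given a line slice that points into document.mem, the byte offset of the line is recovered by pointer arithmetic, the same @ptrToInt idiom as `line_mem_start` in the hunks above. The names and buffer below are local to the example, not zls APIs.

    const std = @import("std");

    // Illustrative only: recover the offset of a sub-slice within its backing
    // buffer, so the pair of indices can be handed to borrowNullTerminatedSlice.
    fn lineOffsetInMem(mem: []const u8, line: []const u8) usize {
        return @ptrToInt(line.ptr) - @ptrToInt(mem.ptr);
    }

    pub fn main() void {
        const mem = "line one\nline two\n".*;
        const line = mem[9..17]; // "line two", a slice into `mem`

        const line_mem_start = lineOffsetInMem(&mem, line);
        std.debug.assert(line_mem_start == 9);

        // A sub-range [5, 8) of the line maps to [14, 17) of the backing buffer.
        const start = line_mem_start + 5;
        const end = line_mem_start + 8;
        std.debug.print("mem[{d}..{d}] = '{s}'\n", .{ start, end, mem[start..end] });
    }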