Fix build for zig master by inserting null terminators when tokenizing and resetting them when needed
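On zig master, std.zig.Tokenizer.init takes a sentinel-terminated slice ([:0]const u8) instead of []const u8, so the tokenizer can no longer be pointed at an arbitrary substring of a document. The workaround used throughout this commit is to temporarily overwrite the byte just past the range of interest with 0, tokenize the resulting [:0] view, and restore the saved byte afterwards. A minimal standalone sketch of the pattern (the buffer and indices here are made up for illustration):

    const std = @import("std");

    pub fn main() void {
        // Illustration only: a local buffer stands in for TextDocument.mem.
        var mem = "const x = foo.bar; trailing".*;
        const end: usize = 18; // borrow mem[0..18] for tokenizing

        // Borrow: save the byte after the range, then null-terminate in place.
        const popped = mem[end];
        mem[end] = 0;

        // mem[0..end :0] type-checks because mem[end] is now 0.
        var tokenizer = std.zig.Tokenizer.init(mem[0..end :0]);
        while (true) {
            const tok = tokenizer.next();
            if (tok.tag == .eof) break;
            std.debug.print("{s}\n", .{@tagName(tok.tag)});
        }

        // Release: restore the original byte.
        mem[end] = popped;
    }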
parent d38d67ab7d
commit bba069526c

src/analysis.zig (169 changed lines)
@@ -1525,90 +1525,99 @@ pub fn documentPositionContext(
-    _ = document;
-
     const line = doc_position.line;
-    var tokenizer = std.zig.Tokenizer.init(line[0..doc_position.line_index]);
+    const line_mem_start = @ptrToInt(line.ptr) - @ptrToInt(document.mem.ptr);
 
     var stack = try std.ArrayList(StackState).initCapacity(&arena.allocator, 8);
+    {
+        var held_line = document.borrowNullTerminatedSlice(
+            line_mem_start,
+            line_mem_start + doc_position.line_index,
+        );
+        defer held_line.release();
+        var tokenizer = std.zig.Tokenizer.init(held_line.data());
 
         while (true) {
             const tok = tokenizer.next();
             // Early exits.
             switch (tok.tag) {
                 .invalid => {
                     // Single '@' do not return a builtin token so we check this on our own.
                     if (line[doc_position.line_index - 1] == '@') {
                         return PositionContext{
                             .builtin = .{
                                 .start = doc_position.line_index - 1,
                                 .end = doc_position.line_index,
                             },
                         };
                     }
                     return .other;
                 },
                 .doc_comment, .container_doc_comment => return .comment,
                 .eof => break,
                 else => {},
             }
 
             // State changes
             var curr_ctx = try peek(&stack);
             switch (tok.tag) {
                 .string_literal, .multiline_string_literal_line => curr_ctx.ctx = .{ .string_literal = tok.loc },
                 .identifier => switch (curr_ctx.ctx) {
                     .empty, .pre_label => curr_ctx.ctx = .{ .var_access = tok.loc },
                     .label => |filled| if (!filled) {
                         curr_ctx.ctx = .{ .label = true };
                     } else {
                         curr_ctx.ctx = .{ .var_access = tok.loc };
                     },
                     else => {},
                 },
                 .builtin => switch (curr_ctx.ctx) {
                     .empty, .pre_label => curr_ctx.ctx = .{ .builtin = tok.loc },
                     else => {},
                 },
                 .period, .period_asterisk => switch (curr_ctx.ctx) {
                     .empty, .pre_label => curr_ctx.ctx = .enum_literal,
                     .enum_literal => curr_ctx.ctx = .empty,
                     .field_access => {},
                     .other => {},
                     .global_error_set => {},
                     else => curr_ctx.ctx = .{
                         .field_access = tokenRangeAppend(curr_ctx.ctx.range().?, tok),
                     },
                 },
                 .keyword_break, .keyword_continue => curr_ctx.ctx = .pre_label,
                 .colon => if (curr_ctx.ctx == .pre_label) {
                     curr_ctx.ctx = .{ .label = false };
                 } else {
                     curr_ctx.ctx = .empty;
                 },
                 .question_mark => switch (curr_ctx.ctx) {
                     .field_access => {},
                     else => curr_ctx.ctx = .empty,
                 },
                 .l_paren => try stack.append(.{ .ctx = .empty, .stack_id = .Paren }),
                 .l_bracket => try stack.append(.{ .ctx = .empty, .stack_id = .Bracket }),
                 .r_paren => {
                     _ = stack.pop();
                     if (curr_ctx.stack_id != .Paren) {
                         (try peek(&stack)).ctx = .empty;
                     }
                 },
                 .r_bracket => {
                     _ = stack.pop();
                     if (curr_ctx.stack_id != .Bracket) {
                         (try peek(&stack)).ctx = .empty;
                     }
                 },
                 .keyword_error => curr_ctx.ctx = .global_error_set,
                 else => curr_ctx.ctx = .empty,
             }
 
             switch (curr_ctx.ctx) {
                 .field_access => |r| curr_ctx.ctx = .{
                     .field_access = tokenRangeAppend(r, tok),
                 },
                 else => {},
             }
         }
+    }
@@ -1628,11 +1637,17 @@ pub fn documentPositionContext(
             }
         }
         if (doc_position.line_index < line.len) {
+            var held_line = document.borrowNullTerminatedSlice(
+                line_mem_start + doc_position.line_index,
+                line_mem_start + line.len,
+            );
+            defer held_line.release();
+
             switch (line[doc_position.line_index]) {
                 'a'...'z', 'A'...'Z', '_', '@' => {},
                 else => break :block .empty,
             }
-            tokenizer = std.zig.Tokenizer.init(line[doc_position.line_index..]);
+            var tokenizer = std.zig.Tokenizer.init(held_line.data());
             const tok = tokenizer.next();
             if (tok.tag == .identifier)
                 break :block PositionContext{ .var_access = tok.loc };
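Note that borrowNullTerminatedSlice takes offsets into document.mem, not into the line, which is why line_mem_start (the line's byte offset within the backing buffer) is computed from the pointer difference first.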
src/document_store.zig

@@ -177,7 +177,7 @@ fn loadPackages(context: LoadPackagesContext) !void {
 
 /// This function asserts the document is not open yet and takes ownership
 /// of the uri and text passed in.
-fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Handle {
+fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Handle {
     log.debug("Opened document: {s}", .{uri});
 
     var handle = try self.allocator.create(Handle);
@@ -297,7 +297,13 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
     }
 
     // Read the build file, create a new document, set the candidate to the new build file.
-    const build_file_text = try build_file.readToEndAlloc(self.allocator, std.math.maxInt(usize));
+    const build_file_text = try build_file.readToEndAllocOptions(
+        self.allocator,
+        std.math.maxInt(usize),
+        null,
+        @alignOf(u8),
+        0,
+    );
     errdefer self.allocator.free(build_file_text);
 
     const build_file_handle = try self.newDocument(build_file_uri, build_file_text);
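readToEndAllocOptions with a trailing sentinel argument of 0 returns a null-terminated [:0]u8 (the null before @alignOf(u8) is the optional size hint), which matches the new [:0]u8 parameter of newDocument.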
@@ -336,7 +342,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H
         return entry.value_ptr.*;
     }
 
-    const duped_text = try std.mem.dupe(self.allocator, u8, text);
+    const duped_text = try std.mem.dupeZ(self.allocator, u8, text);
     errdefer self.allocator.free(duped_text);
     const duped_uri = try std.mem.dupe(self.allocator, u8, uri);
     errdefer self.allocator.free(duped_uri);
@@ -505,7 +511,7 @@ pub fn applyChanges(
 
     for (content_changes.items) |change| {
         if (change.Object.get("range")) |range| {
-            std.debug.assert(document.text.ptr == document.mem.ptr);
+            std.debug.assert(@ptrCast([*]const u8, document.text.ptr) == document.mem.ptr);
 
             // TODO: add tests and validate the JSON
             const start_obj = range.Object.get("start").?.Object;
@@ -525,12 +531,12 @@ pub fn applyChanges(
 
             const old_len = document.text.len;
             const new_len = old_len - (end_index - start_index) + change_text.len;
-            if (new_len > document.mem.len) {
+            if (new_len >= document.mem.len) {
                 // We need to reallocate memory.
                 // We reallocate twice the current filesize or the new length, if it's more than that
                 // so that we can reduce the amount of realloc calls.
                 // We can tune this to find a better size if needed.
-                const realloc_len = std.math.max(2 * old_len, new_len);
+                const realloc_len = std.math.max(2 * old_len, new_len + 1);
                 document.mem = try self.allocator.realloc(document.mem, realloc_len);
             }
 
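The comparison becomes >= and the reallocation reserves new_len + 1 because document.mem must always keep one spare byte past the text for the 0 terminator written below.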
@@ -546,19 +552,21 @@ pub fn applyChanges(
             std.mem.copy(u8, document.mem[start_index..][0..change_text.len], change_text);
 
             // Reset the text substring.
-            document.text = document.mem[0..new_len];
+            document.mem[new_len] = 0;
+            document.text = document.mem[0..new_len :0];
         } else {
             const change_text = change.Object.get("text").?.String;
             const old_len = document.text.len;
 
-            if (change_text.len > document.mem.len) {
+            if (change_text.len >= document.mem.len) {
                 // Like above.
-                const realloc_len = std.math.max(2 * old_len, change_text.len);
+                const realloc_len = std.math.max(2 * old_len, change_text.len + 1);
                 document.mem = try self.allocator.realloc(document.mem, realloc_len);
             }
 
             std.mem.copy(u8, document.mem[0..change_text.len], change_text);
-            document.text = document.mem[0..change_text.len];
+            document.mem[change_text.len] = 0;
+            document.text = document.mem[0..change_text.len :0];
         }
     }
 
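In both branches the buffer is re-terminated after the edit (document.mem[len] = 0) before text is resliced with the :0 sentinel, so document.text stays a valid [:0] slice across both incremental and full-content changes.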
@@ -658,7 +666,13 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
 
     defer file.close();
     {
-        const file_contents = file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| switch (err) {
+        const file_contents = file.readToEndAllocOptions(
+            allocator,
+            std.math.maxInt(usize),
+            null,
+            @alignOf(u8),
+            0,
+        ) catch |err| switch (err) {
             error.OutOfMemory => return error.OutOfMemory,
             else => {
                 log.debug("Could not read from file {s}", .{file_path});
src/main.zig (19 changed lines)

@@ -774,9 +774,14 @@ fn getSymbolFieldAccess(
 
     const name = identifierFromPosition(position.absolute_index, handle.*);
     if (name.len == 0) return null;
-    var tokenizer = std.zig.Tokenizer.init(position.line[range.start..range.end]);
 
+    const line_mem_start = @ptrToInt(position.line.ptr) - @ptrToInt(handle.document.mem.ptr);
+    var held_range = handle.document.borrowNullTerminatedSlice(line_mem_start + range.start, line_mem_start + range.end);
+    var tokenizer = std.zig.Tokenizer.init(held_range.data());
+
+    errdefer held_range.release();
     if (try analysis.getFieldAccessType(&document_store, arena, handle, position.absolute_index, &tokenizer)) |result| {
+        held_range.release();
         const container_handle = result.unwrapped orelse result.original;
         const container_handle_node = switch (container_handle.type.data) {
             .other => |n| n,
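held_range is released by hand on the success path (and by errdefer on the error path) as soon as getFieldAccessType has finished tokenizing, presumably so the borrowed byte is restored before any later code reads the document; the same pattern appears in completeFieldAccess below.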
@@ -1147,8 +1152,14 @@ fn completeFieldAccess(
     config: Config,
 ) !void {
     var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);
-    var tokenizer = std.zig.Tokenizer.init(position.line[range.start..range.end]);
+
+    const line_mem_start = @ptrToInt(position.line.ptr) - @ptrToInt(handle.document.mem.ptr);
+    var held_range = handle.document.borrowNullTerminatedSlice(line_mem_start + range.start, line_mem_start + range.end);
+    errdefer held_range.release();
+    var tokenizer = std.zig.Tokenizer.init(held_range.data());
 
     if (try analysis.getFieldAccessType(&document_store, arena, handle, position.absolute_index, &tokenizer)) |result| {
+        held_range.release();
         try typeToCompletion(arena, &completions, result, handle, config);
         truncateCompletions(completions.items, config.max_detail_length);
     }
@@ -1759,7 +1770,9 @@ var gpa_state = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = stack_
 pub fn main() anyerror!void {
     defer _ = gpa_state.deinit();
     defer keep_running = false;
-    allocator = &gpa_state.allocator;
+    // allocator = &gpa_state.allocator;
+    // @TODO Using the GPA here, realloc calls hang currently for some reason
+    allocator = std.heap.page_allocator;
 
     analysis.init(allocator);
     defer analysis.deinit();
src/signature_help.zig

@@ -264,9 +264,12 @@ pub fn getSignatureInfo(
         const expr_start = token_starts[expr_first_token];
         const last_token_slice = tree.tokenSlice(expr_last_token);
         const expr_end = token_starts[expr_last_token] + last_token_slice.len;
-        const expr_source = tree.source[expr_start..expr_end];
+
+        var held_expr = handle.document.borrowNullTerminatedSlice(expr_start, expr_end);
+        defer held_expr.release();
 
         // Resolve the expression.
-        var tokenizer = std.zig.Tokenizer.init(expr_source);
+        var tokenizer = std.zig.Tokenizer.init(held_expr.data());
         if (try analysis.getFieldAccessType(
             document_store,
             arena,
src/types.zig

@@ -114,9 +114,36 @@ pub const Diagnostic = struct {
 pub const TextDocument = struct {
     uri: []const u8,
     // This is a substring of mem starting at 0
-    text: []const u8,
+    text: [:0]const u8,
     // This holds the memory that we have actually allocated.
     mem: []u8,
 
+    const Held = struct {
+        document: *const TextDocument,
+        popped: u8,
+        start_index: usize,
+        end_index: usize,
+
+        pub fn data(self: @This()) [:0]const u8 {
+            return self.document.mem[self.start_index..self.end_index :0];
+        }
+
+        pub fn release(self: *@This()) void {
+            self.document.mem[self.end_index] = self.popped;
+        }
+    };
+
+    pub fn borrowNullTerminatedSlice(self: *const @This(), start_idx: usize, end_idx: usize) Held {
+        std.debug.assert(end_idx >= start_idx and end_idx < self.text.len);
+        const popped_char = self.mem[end_idx];
+        self.mem[end_idx] = 0;
+        return .{
+            .document = self,
+            .popped = popped_char,
+            .start_index = start_idx,
+            .end_index = end_idx,
+        };
+    }
 };
 
 pub const WorkspaceEdit = struct {
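Every borrow site above follows the same shape once TextDocument exposes this helper. A hypothetical call site, assuming a TextDocument value doc and offsets start/end that satisfy the assert in borrowNullTerminatedSlice:

    var held = doc.borrowNullTerminatedSlice(start, end);
    defer held.release(); // puts the popped byte back

    var tokenizer = std.zig.Tokenizer.init(held.data());
    // ... consume tokens; doc.mem[end] is 0 only while the borrow is live.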