Merge pull request #1000 from Techatrix/stage2-zir
Embed AstGen into ZLS
commit 471d971d1d
@@ -79,6 +79,7 @@ The following options are currently available.
 | `include_at_in_builtins` | `bool` | `false` | Whether the @ sign should be part of the completion of builtins |
 | `skip_std_references` | `bool` | `false` | When true, skips searching for references in std. Improves lookup speed for functions in user's code. Renaming and go-to-definition will continue to work as is |
 | `max_detail_length` | `usize` | `1048576` | The detail field of completions is truncated to be no longer than this (in bytes) |
+| `prefer_ast_check_as_child_process` | `bool` | `true` | Can be used in conjunction with `enable_ast_check_diagnostics` to favor using `zig ast-check` instead of ZLS's fork |
 | `record_session` | `bool` | `false` | When true, zls will record all requests it receives and write them into `record_session_path`, so that they can be replayed with `zls replay` |
 | `record_session_path` | `?[]const u8` | `null` | Output file path when `record_session` is set. The recommended file extension is *.zlsreplay |
 | `replay_session_path` | `?[]const u8` | `null` | Used when calling `zls replay` for specifying the replay file. If no extra argument is given `record_session_path` is used as the default path. |
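Taken together with `enable_ast_check_diagnostics`, the new `prefer_ast_check_as_child_process` option decides whether diagnostics come from spawning `zig ast-check` or from the AstGen fork this PR embeds. A condensed sketch of that decision, with `config` standing in for the server's configuration (it mirrors the `DocumentStore.wantZir` function added further down):

    // Condensed from DocumentStore.wantZir below: the embedded AstGen (ZIR)
    // path is used only when `zig ast-check` cannot run as a child process.
    const can_run_ast_check = std.process.can_spawn and
        config.zig_exe_path != null and
        config.prefer_ast_check_as_child_process;
    const want_zir = config.enable_ast_check_diagnostics and !can_run_ast_check;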
@@ -94,6 +94,11 @@
            "type": "integer",
            "default": "1048576"
        },
+       "prefer_ast_check_as_child_process": {
+           "description": "Can be used in conjunction with `enable_ast_check_diagnostics` to favor using `zig ast-check` instead of ZLS's fork",
+           "type": "boolean",
+           "default": "true"
+       },
        "record_session": {
            "description": "When true, zls will record all requests it receives and write them into `record_session_path`, so that they can be replayed with `zls replay`",
            "type": "boolean",
@@ -61,6 +61,9 @@ skip_std_references: bool = false,
 /// The detail field of completions is truncated to be no longer than this (in bytes)
 max_detail_length: usize = 1048576,
 
+/// Can be used in conjunction with `enable_ast_check_diagnostics` to favor using `zig ast-check` instead of ZLS's fork
+prefer_ast_check_as_child_process: bool = true,
+
 /// When true, zls will record all requests it receives and write them into `record_session_path`, so that they can be replayed with `zls replay`
 record_session: bool = false,
 
@@ -13,6 +13,8 @@ const Config = @import("Config.zig");
 const ZigVersionWrapper = @import("ZigVersionWrapper.zig");
 const translate_c = @import("translate_c.zig");
 const ComptimeInterpreter = @import("ComptimeInterpreter.zig");
+const AstGen = @import("stage2/AstGen.zig");
+const Zir = @import("stage2/Zir.zig");
 
 const DocumentStore = @This();
 
@@ -56,6 +58,13 @@ pub const Handle = struct {
     uri: Uri,
     text: [:0]const u8,
     tree: Ast,
+    /// do not access unless `zir_status != .none`
+    zir: Zir = undefined,
+    zir_status: enum {
+        none,
+        outdated,
+        done,
+    } = .none,
     /// Not null if a ComptimeInterpreter is actually used
     interpreter: ?*ComptimeInterpreter = null,
     document_scope: analysis.DocumentScope,
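Because `zir` stays `undefined` until AstGen has run, every consumer must check `zir_status` first. A minimal sketch of a safe accessor (a hypothetical helper, not in the commit; the error check mirrors `getDiagnosticsFromZir` in src/features/diagnostics.zig below):

    fn zirHasCompileErrors(handle: DocumentStore.Handle) bool {
        if (handle.zir_status == .none) return false; // `zir` must not be read here
        return handle.zir.extra[@enumToInt(Zir.ExtraIndex.compile_errors)] != 0;
    }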
@@ -74,6 +83,7 @@ pub const Handle = struct {
             allocator.destroy(interpreter);
         }
         self.document_scope.deinit(allocator);
+        if (self.zir_status != .none) self.zir.deinit(allocator);
         self.tree.deinit(allocator);
         allocator.free(self.text);
         allocator.free(self.uri);
@@ -214,6 +224,15 @@ pub fn refreshDocument(self: *DocumentStore, uri: Uri, new_text: [:0]const u8) !
     handle.tree.deinit(self.allocator);
     handle.tree = new_tree;
 
+    if (self.wantZir() and handle.open and new_tree.errors.len == 0) {
+        const new_zir = try AstGen.generate(self.allocator, new_tree);
+        if (handle.zir_status != .none) handle.zir.deinit(self.allocator);
+        handle.zir = new_zir;
+        handle.zir_status = .done;
+    } else if (handle.zir_status == .done) {
+        handle.zir_status = .outdated;
+    }
+
     var new_document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree);
     handle.document_scope.deinit(self.allocator);
     handle.document_scope = new_document_scope;
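So on refresh, ZIR is regenerated only for open, parse-error-free documents; otherwise previously generated ZIR is kept but demoted to `.outdated`. The diagnostics code added by this PR consumes the status accordingly (quoted from `getAstCheckDiagnostics` in src/features/diagnostics.zig below):

    switch (handle.zir_status) {
        .none, .outdated => {}, // missing or stale ZIR is never reported
        .done => try getDiagnosticsFromZir(server, handle, diagnostics),
    }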
@@ -698,17 +717,31 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]const u8, open: bool
         var tree = try Ast.parse(self.allocator, text, .zig);
         errdefer tree.deinit(self.allocator);
 
+        // remove unused capacity
         var nodes = tree.nodes.toMultiArrayList();
         try nodes.setCapacity(self.allocator, nodes.len);
         tree.nodes = nodes.slice();
 
+        // remove unused capacity
         var tokens = tree.tokens.toMultiArrayList();
         try tokens.setCapacity(self.allocator, tokens.len);
         tree.tokens = tokens.slice();
 
+        const generate_zir = self.wantZir() and open and tree.errors.len == 0;
+        var zir: ?Zir = if (generate_zir) try AstGen.generate(self.allocator, tree) else null;
+        errdefer if (zir) |*code| code.deinit(self.allocator);
+
+        // remove unused capacity
+        if (zir) |*code| {
+            var instructions = code.instructions.toMultiArrayList();
+            try instructions.setCapacity(self.allocator, instructions.len);
+            code.instructions = instructions.slice();
+        }
+
         var document_scope = try analysis.makeDocumentScope(self.allocator, tree);
         errdefer document_scope.deinit(self.allocator);
 
+        // remove unused capacity
         try document_scope.scopes.setCapacity(self.allocator, document_scope.scopes.len);
 
         break :blk Handle{
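Each "remove unused capacity" block applies the same trick: `Ast` and `Zir` store their data as `std.MultiArrayList` slices, and shrinking capacity to the exact length keeps a long-lived Handle from pinning the parser's or AstGen's over-allocation. In isolation (a sketch; `allocator` stands in for `self.allocator`):

    var nodes = tree.nodes.toMultiArrayList();
    try nodes.setCapacity(allocator, nodes.len); // reallocate down to exactly `len`
    tree.nodes = nodes.slice();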
@@ -716,6 +749,8 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]const u8, open: bool
             .uri = duped_uri,
             .text = text,
             .tree = tree,
+            .zir = if (zir) |code| code else undefined,
+            .zir_status = if (zir != null) .done else .none,
             .document_scope = document_scope,
         };
     };
@@ -1084,6 +1119,12 @@ pub fn enumCompletionItems(self: DocumentStore, arena: std.mem.Allocator, handle
     return try self.tagStoreCompletionItems(arena, handle, "enum_completions");
 }
 
+pub fn wantZir(self: DocumentStore) bool {
+    if (!self.config.enable_ast_check_diagnostics) return false;
+    const can_run_ast_check = std.process.can_spawn and self.config.zig_exe_path != null and self.config.prefer_ast_check_as_child_process;
+    return !can_run_ast_check;
+}
+
 pub fn ensureInterpreterExists(self: *DocumentStore, uri: Uri) !*ComptimeInterpreter {
     var handle = self.handles.get(uri).?;
     if (handle.interpreter != null) return handle.interpreter.?;
src/Server.zig (314 changes)
@@ -29,6 +29,7 @@ const completions = @import("features/completions.zig");
 const goto = @import("features/goto.zig");
 const hover_handler = @import("features/hover.zig");
 const selection_range = @import("features/selection_range.zig");
+const diagnostics_gen = @import("features/diagnostics.zig");
 
 const tres = @import("tres");
 
@@ -209,285 +210,6 @@ fn showMessage(
     });
 }
 
-fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutOfMemory}!types.PublishDiagnosticsParams {
-    const tracy_zone = tracy.trace(@src());
-    defer tracy_zone.end();
-    // ... body omitted here; it moves essentially verbatim to
-    // `generateDiagnostics` in src/features/diagnostics.zig, shown in full below ...
-}
-
-fn getAstCheckDiagnostics(
-    server: *Server,
-    handle: DocumentStore.Handle,
-    diagnostics: *std.ArrayListUnmanaged(types.Diagnostic),
-) !void {
-    // ... body omitted here; it moves, with minor adjustments, to
-    // `getDiagnosticsFromAstCheck` in src/features/diagnostics.zig, shown in full below ...
-}
-
 fn getAutofixMode(server: *Server) enum {
     on_save,
     will_save_wait_until,
@@ -507,12 +229,11 @@ fn getAutofixMode(server: *Server) enum {
 /// caller owns returned memory.
 pub fn autofix(server: *Server, allocator: std.mem.Allocator, handle: *const DocumentStore.Handle) error{OutOfMemory}!std.ArrayListUnmanaged(types.TextEdit) {
     if (!server.config.enable_ast_check_diagnostics) return .{};
     if (handle.tree.errors.len != 0) return .{};
 
     var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){};
-    getAstCheckDiagnostics(server, handle.*, &diagnostics) catch |err| {
-        log.err("failed to run ast-check: {}", .{err});
-    };
+    try diagnostics_gen.getAstCheckDiagnostics(server, handle.*, &diagnostics);
+    if (diagnostics.items.len == 0) return .{};
 
     var builder = code_actions.Builder{
         .arena = server.arena.allocator(),
@@ -765,6 +486,16 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) Error!typ
         , .{server.config.record_session_path});
     }
 
+    if (server.config.enable_ast_check_diagnostics and
+        server.config.prefer_ast_check_as_child_process)
+    {
+        if (!std.process.can_spawn) {
+            log.info("'prefer_ast_check_as_child_process' is ignored because your OS can't spawn a child process", .{});
+        } else if (server.config.zig_exe_path == null) {
+            log.info("'prefer_ast_check_as_child_process' is ignored because Zig could not be found", .{});
+        }
+    }
+
     return .{
         .serverInfo = .{
             .name = "zls",
@@ -1025,7 +756,7 @@ fn openDocumentHandler(server: *Server, notification: types.DidOpenTextDocumentP
 
     if (server.client_capabilities.supports_publish_diagnostics) blk: {
         if (!std.process.can_spawn) break :blk;
-        const diagnostics = try server.generateDiagnostics(handle);
+        const diagnostics = try diagnostics_gen.generateDiagnostics(server, handle);
         server.sendNotification("textDocument/publishDiagnostics", diagnostics);
     }
 }
@@ -1042,7 +773,7 @@ fn changeDocumentHandler(server: *Server, notification: types.DidChangeTextDocum
 
     if (server.client_capabilities.supports_publish_diagnostics) blk: {
         if (!std.process.can_spawn) break :blk;
-        const diagnostics = try server.generateDiagnostics(handle.*);
+        const diagnostics = try diagnostics_gen.generateDiagnostics(server, handle.*);
         server.sendNotification("textDocument/publishDiagnostics", diagnostics);
     }
 }
@@ -1054,7 +785,7 @@ fn saveDocumentHandler(server: *Server, notification: types.DidSaveTextDocumentP
     const handle = server.document_store.getHandle(uri) orelse return;
     try server.document_store.applySave(handle);
 
-    if (std.process.can_spawn and server.getAutofixMode() == .on_save) {
+    if (server.getAutofixMode() == .on_save) {
         var text_edits = try server.autofix(allocator, handle);
 
         var workspace_edit = types.WorkspaceEdit{ .changes = .{} };
@@ -1082,7 +813,6 @@ fn willSaveWaitUntilHandler(server: *Server, request: types.WillSaveTextDocument
 
     const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
 
-    if (!std.process.can_spawn) return null;
     var text_edits = try server.autofix(allocator, handle);
 
     return try text_edits.toOwnedSlice(allocator);
@@ -1185,7 +915,7 @@ pub fn hoverHandler(server: *Server, request: types.HoverParams) Error!?types.Ho
     // TODO: Figure out a better solution for comptime interpreter diags
     if (server.client_capabilities.supports_publish_diagnostics) blk: {
         if (!std.process.can_spawn) break :blk;
-        const diagnostics = try server.generateDiagnostics(handle.*);
+        const diagnostics = try diagnostics_gen.generateDiagnostics(server, handle.*);
         server.sendNotification("textDocument/publishDiagnostics", diagnostics);
     }
 
@@ -1459,12 +1189,8 @@ fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]
 
     // as of right now, only ast-check errors may get a code action
     var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){};
-    if (server.config.enable_ast_check_diagnostics and handle.tree.errors.len == 0) blk: {
-        if (!std.process.can_spawn) break :blk;
-        getAstCheckDiagnostics(server, handle.*, &diagnostics) catch |err| {
-            log.err("failed to run ast-check: {}", .{err});
-            return error.InternalError;
-        };
+    if (server.config.enable_ast_check_diagnostics and handle.tree.errors.len == 0) {
+        try diagnostics_gen.getAstCheckDiagnostics(server, handle.*, &diagnostics);
     }
 
     var actions = std.ArrayListUnmanaged(types.CodeAction){};
@@ -107,6 +107,12 @@
        "type": "usize",
        "default": "1048576"
    },
+   {
+       "name": "prefer_ast_check_as_child_process",
+       "description": "Can be used in conjunction with `enable_ast_check_diagnostics` to favor using `zig ast-check` instead of ZLS's fork",
+       "type": "bool",
+       "default": "true"
+   },
    {
        "name": "record_session",
        "description": "When true, zls will record all requests it receives and write them into `record_session_path`, so that they can be replayed with `zls replay`",
src/features/diagnostics.zig (new file, 391 lines)
@@ -0,0 +1,391 @@
const std = @import("std");
const Ast = std.zig.Ast;
const log = std.log.scoped(.zls_diagnostics);

const Server = @import("../Server.zig");
const DocumentStore = @import("../DocumentStore.zig");
const types = @import("../lsp.zig");
const Analyser = @import("../analysis.zig");
const ast = @import("../ast.zig");
const offsets = @import("../offsets.zig");
const tracy = @import("../tracy.zig");

const Module = @import("../stage2/Module.zig");
const Zir = @import("../stage2/Zir.zig");

pub fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutOfMemory}!types.PublishDiagnosticsParams {
    const tracy_zone = tracy.trace(@src());
    defer tracy_zone.end();

    std.debug.assert(server.client_capabilities.supports_publish_diagnostics);

    const tree = handle.tree;

    var allocator = server.arena.allocator();
    var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){};

    for (tree.errors) |err| {
        var mem_buffer: [256]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&mem_buffer);
        tree.renderError(err, fbs.writer()) catch if (std.debug.runtime_safety) unreachable else continue; // if an error occurs here increase buffer size

        try diagnostics.append(allocator, .{
            .range = offsets.tokenToRange(tree, err.token, server.offset_encoding),
            .severity = .Error,
            .code = .{ .string = @tagName(err.tag) },
            .source = "zls",
            .message = try server.arena.allocator().dupe(u8, fbs.getWritten()),
            // .relatedInformation = undefined
        });
    }

    if (server.config.enable_ast_check_diagnostics and tree.errors.len == 0) {
        try getAstCheckDiagnostics(server, handle, &diagnostics);
    }

    if (server.config.warn_style) {
        var node: u32 = 0;
        while (node < tree.nodes.len) : (node += 1) {
            if (ast.isBuiltinCall(tree, node)) {
                const builtin_token = tree.nodes.items(.main_token)[node];
                const call_name = tree.tokenSlice(builtin_token);

                if (!std.mem.eql(u8, call_name, "@import")) continue;

                var buffer: [2]Ast.Node.Index = undefined;
                const params = ast.builtinCallParams(tree, node, &buffer).?;

                if (params.len != 1) continue;

                const import_str_token = tree.nodes.items(.main_token)[params[0]];
                const import_str = tree.tokenSlice(import_str_token);

                if (std.mem.startsWith(u8, import_str, "\"./")) {
                    try diagnostics.append(allocator, .{
                        .range = offsets.tokenToRange(tree, import_str_token, server.offset_encoding),
                        .severity = .Hint,
                        .code = .{ .string = "dot_slash_import" },
                        .source = "zls",
                        .message = "A ./ is not needed in imports",
                    });
                }
            }
        }

        // TODO: style warnings for types, values and declarations below root scope
        if (tree.errors.len == 0) {
            for (tree.rootDecls()) |decl_idx| {
                const decl = tree.nodes.items(.tag)[decl_idx];
                switch (decl) {
                    .fn_proto,
                    .fn_proto_multi,
                    .fn_proto_one,
                    .fn_proto_simple,
                    .fn_decl,
                    => blk: {
                        var buf: [1]Ast.Node.Index = undefined;
                        const func = tree.fullFnProto(&buf, decl_idx).?;
                        if (func.extern_export_inline_token != null) break :blk;

                        if (func.name_token) |name_token| {
                            const is_type_function = Analyser.isTypeFunction(tree, func);

                            const func_name = tree.tokenSlice(name_token);
                            if (!is_type_function and !Analyser.isCamelCase(func_name)) {
                                try diagnostics.append(allocator, .{
                                    .range = offsets.tokenToRange(tree, name_token, server.offset_encoding),
                                    .severity = .Hint,
                                    .code = .{ .string = "bad_style" },
                                    .source = "zls",
                                    .message = "Functions should be camelCase",
                                });
                            } else if (is_type_function and !Analyser.isPascalCase(func_name)) {
                                try diagnostics.append(allocator, .{
                                    .range = offsets.tokenToRange(tree, name_token, server.offset_encoding),
                                    .severity = .Hint,
                                    .code = .{ .string = "bad_style" },
                                    .source = "zls",
                                    .message = "Type functions should be PascalCase",
                                });
                            }
                        }
                    },
                    else => {},
                }
            }
        }
    }

    for (handle.cimports.items(.hash), handle.cimports.items(.node)) |hash, node| {
        const result = server.document_store.cimports.get(hash) orelse continue;
        if (result != .failure) continue;
        const stderr = std.mem.trim(u8, result.failure, " ");

        var pos_and_diag_iterator = std.mem.split(u8, stderr, ":");
        _ = pos_and_diag_iterator.next(); // skip file path
        _ = pos_and_diag_iterator.next(); // skip line
        _ = pos_and_diag_iterator.next(); // skip character

        try diagnostics.append(allocator, .{
            .range = offsets.nodeToRange(handle.tree, node, server.offset_encoding),
            .severity = .Error,
            .code = .{ .string = "cImport" },
            .source = "zls",
            .message = try allocator.dupe(u8, pos_and_diag_iterator.rest()),
        });
    }

    if (server.config.highlight_global_var_declarations) {
        const main_tokens = tree.nodes.items(.main_token);
        const tags = tree.tokens.items(.tag);
        for (tree.rootDecls()) |decl| {
            const decl_tag = tree.nodes.items(.tag)[decl];
            const decl_main_token = tree.nodes.items(.main_token)[decl];

            switch (decl_tag) {
                .simple_var_decl,
                .aligned_var_decl,
                .local_var_decl,
                .global_var_decl,
                => {
                    if (tags[main_tokens[decl]] != .keyword_var) continue; // skip anything immutable
                    // uncomment this to get a list :)
                    //log.debug("possible global variable \"{s}\"", .{tree.tokenSlice(decl_main_token + 1)});
                    try diagnostics.append(allocator, .{
                        .range = offsets.tokenToRange(tree, decl_main_token, server.offset_encoding),
                        .severity = .Hint,
                        .code = .{ .string = "highlight_global_var_declarations" },
                        .source = "zls",
                        .message = "Global var declaration",
                    });
                },
                else => {},
            }
        }
    }

    if (handle.interpreter) |int| {
        try diagnostics.ensureUnusedCapacity(allocator, int.errors.count());

        var err_it = int.errors.iterator();

        while (err_it.next()) |err| {
            diagnostics.appendAssumeCapacity(.{
                .range = offsets.nodeToRange(tree, err.key_ptr.*, server.offset_encoding),
                .severity = .Error,
                .code = .{ .string = err.value_ptr.code },
                .source = "zls",
                .message = err.value_ptr.message,
            });
        }
    }
    // try diagnostics.appendSlice(allocator, handle.interpreter.?.diagnostics.items);

    return .{
        .uri = handle.uri,
        .diagnostics = diagnostics.items,
    };
}

pub fn getAstCheckDiagnostics(
    server: *Server,
    handle: DocumentStore.Handle,
    diagnostics: *std.ArrayListUnmanaged(types.Diagnostic),
) error{OutOfMemory}!void {
    std.debug.assert(server.config.enable_ast_check_diagnostics);
    std.debug.assert(handle.tree.errors.len == 0);

    if (server.config.prefer_ast_check_as_child_process and
        std.process.can_spawn and
        server.config.zig_exe_path != null)
    {
        getDiagnosticsFromAstCheck(server, handle, diagnostics) catch |err| {
            log.err("failed to run ast-check: {}", .{err});
        };
    } else {
        std.debug.assert(server.document_store.wantZir());
        switch (handle.zir_status) {
            .none, .outdated => {},
            .done => try getDiagnosticsFromZir(server, handle, diagnostics),
        }
    }
}

fn getDiagnosticsFromAstCheck(
    server: *Server,
    handle: DocumentStore.Handle,
    diagnostics: *std.ArrayListUnmanaged(types.Diagnostic),
) !void {
    comptime std.debug.assert(std.process.can_spawn);
    std.debug.assert(server.config.zig_exe_path != null);
    var allocator = server.arena.allocator();

    const zig_exe_path = server.config.zig_exe_path.?;

    var process = std.ChildProcess.init(&[_][]const u8{ zig_exe_path, "ast-check", "--color", "off" }, server.allocator);
    process.stdin_behavior = .Pipe;
    process.stderr_behavior = .Pipe;

    process.spawn() catch |err| {
        log.warn("Failed to spawn zig ast-check process, error: {}", .{err});
        return;
    };
    try process.stdin.?.writeAll(handle.text);
    process.stdin.?.close();

    process.stdin = null;

    const stderr_bytes = try process.stderr.?.reader().readAllAlloc(server.allocator, std.math.maxInt(usize));
    defer server.allocator.free(stderr_bytes);

    const term = process.wait() catch |err| {
        log.warn("Failed to await zig ast-check process, error: {}", .{err});
        return;
    };

    if (term != .Exited) return;

    var last_diagnostic: ?types.Diagnostic = null;
    // we don't store DiagnosticRelatedInformation in last_diagnostic; instead
    // it's stored in last_related_diagnostics because we need an ArrayList
    var last_related_diagnostics: std.ArrayListUnmanaged(types.DiagnosticRelatedInformation) = .{};

    // NOTE: I believe that with color off it's one diag per line; is this correct?
    var line_iterator = std.mem.split(u8, stderr_bytes, "\n");

    while (line_iterator.next()) |line| lin: {
        if (!std.mem.startsWith(u8, line, "<stdin>")) continue;

        var pos_and_diag_iterator = std.mem.split(u8, line, ":");
        const maybe_first = pos_and_diag_iterator.next();
        if (maybe_first) |first| {
            if (first.len <= 1) break :lin;
        } else break;

        const utf8_position = types.Position{
            .line = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
            .character = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
        };

        // zig uses utf-8 encoding for character offsets
        const position = offsets.convertPositionEncoding(handle.text, utf8_position, .@"utf-8", server.offset_encoding);
        const range = offsets.tokenPositionToRange(handle.text, position, server.offset_encoding);

        const msg = pos_and_diag_iterator.rest()[1..];

        if (std.mem.startsWith(u8, msg, "note: ")) {
            try last_related_diagnostics.append(allocator, .{
                .location = .{
                    .uri = handle.uri,
                    .range = range,
                },
                .message = try server.arena.allocator().dupe(u8, msg["note: ".len..]),
            });
            continue;
        }

        if (last_diagnostic) |*diagnostic| {
            diagnostic.relatedInformation = try last_related_diagnostics.toOwnedSlice(allocator);
            try diagnostics.append(allocator, diagnostic.*);
            last_diagnostic = null;
        }

        if (std.mem.startsWith(u8, msg, "error: ")) {
            last_diagnostic = types.Diagnostic{
                .range = range,
                .severity = .Error,
                .code = .{ .string = "ast_check" },
                .source = "zls",
                .message = try server.arena.allocator().dupe(u8, msg["error: ".len..]),
            };
        } else {
            last_diagnostic = types.Diagnostic{
                .range = range,
                .severity = .Error,
                .code = .{ .string = "ast_check" },
                .source = "zls",
                .message = try server.arena.allocator().dupe(u8, msg),
            };
        }
    }

    if (last_diagnostic) |*diagnostic| {
        diagnostic.relatedInformation = try last_related_diagnostics.toOwnedSlice(allocator);
        try diagnostics.append(allocator, diagnostic.*);
        last_diagnostic = null;
    }
}

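// Aside (not part of the commit): with "--color", "off", zig ast-check writes
// one diagnostic per line to stderr in the form parsed above, for example:
//
//   <stdin>:4:5: error: local variable is never mutated
//   <stdin>:4:5: note: consider using 'const'
//
// Each line splits on ':' into the file name, 1-based line, 1-based column,
// and the message; "note: " lines attach to the preceding "error: " line via
// `last_related_diagnostics`.
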
fn getDiagnosticsFromZir(
    server: *Server,
    handle: DocumentStore.Handle,
    diagnostics: *std.ArrayListUnmanaged(types.Diagnostic),
) error{OutOfMemory}!void {
    std.debug.assert(handle.zir_status != .none);

    const allocator = server.arena.allocator();

    const payload_index = handle.zir.extra[@enumToInt(Zir.ExtraIndex.compile_errors)];
    if (payload_index == 0) return;

    const header = handle.zir.extraData(Zir.Inst.CompileErrors, payload_index);
    const items_len = header.data.items_len;

    try diagnostics.ensureUnusedCapacity(allocator, items_len);

    var extra_index = header.end;
    for (0..items_len) |_| {
        const item = handle.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
        extra_index = item.end;
        const err_loc = blk: {
            if (item.data.node != 0) {
                break :blk offsets.nodeToLoc(handle.tree, item.data.node);
            }
            const loc = offsets.tokenToLoc(handle.tree, item.data.token);
            break :blk offsets.Loc{
                .start = loc.start + item.data.byte_offset,
                .end = loc.end,
            };
        };

        var notes: []types.DiagnosticRelatedInformation = &.{};
        if (item.data.notes != 0) {
            const block = handle.zir.extraData(Zir.Inst.Block, item.data.notes);
            const body = handle.zir.extra[block.end..][0..block.data.body_len];
            notes = try allocator.alloc(types.DiagnosticRelatedInformation, body.len);
            for (notes, body) |*note, note_index| {
                const note_item = handle.zir.extraData(Zir.Inst.CompileErrors.Item, note_index);
                const msg = handle.zir.nullTerminatedString(note_item.data.msg);

                const loc = blk: {
                    if (note_item.data.node != 0) {
                        break :blk offsets.nodeToLoc(handle.tree, note_item.data.node);
                    }
                    const loc = offsets.tokenToLoc(handle.tree, note_item.data.token);
                    break :blk offsets.Loc{
                        .start = loc.start + note_item.data.byte_offset,
                        .end = loc.end,
                    };
                };

                note.* = .{
                    .location = .{
                        .uri = handle.uri,
                        .range = offsets.locToRange(handle.text, loc, server.offset_encoding),
                    },
                    .message = msg,
                };
            }
        }

        const msg = handle.zir.nullTerminatedString(item.data.msg);
        diagnostics.appendAssumeCapacity(.{
            .range = offsets.locToRange(handle.text, err_loc, server.offset_encoding),
            .severity = .Error,
            .code = .{ .string = "ast_check" },
            .source = "zls",
            .message = msg,
            .relatedInformation = if (notes.len != 0) notes else null,
        });
    }
}
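The error and note branches above resolve source locations with the same rule; a hypothetical helper (not in the commit) states it once:

    // An Item with node != 0 is located by AST node; otherwise by its token
    // plus a byte offset into that token's source range.
    fn itemToLoc(handle: DocumentStore.Handle, item: Zir.Inst.CompileErrors.Item) offsets.Loc {
        if (item.node != 0) return offsets.nodeToLoc(handle.tree, item.node);
        const loc = offsets.tokenToLoc(handle.tree, item.token);
        return .{ .start = loc.start + item.byte_offset, .end = loc.end };
    }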
src/stage2/AstGen.zig (new file, 12821 lines; diff suppressed because it is too large)
src/stage2/BuiltinFn.zig (new file, 1007 lines; diff suppressed because it is too large)

src/stage2/LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
The MIT License (Expat)

Copyright (c) 2015-2023, Zig contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
src/stage2/Module.zig (new file, 960 lines)
@@ -0,0 +1,960 @@
const std = @import("std");
const builtin = @import("builtin");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const log = std.log.scoped(.module);
const Ast = std.zig.Ast;

const Module = @This();
const DocumentStore = @import("../DocumentStore.zig");
const Handle = DocumentStore.Handle;

/// Canonical reference to a position within a source file.
pub const SrcLoc = struct {
    handle: *Handle,
    /// Might be 0 depending on tag of `lazy`.
    parent_decl_node: Ast.Node.Index,
    /// Relative to `parent_decl_node`.
    lazy: LazySrcLoc,

    pub fn declSrcToken(src_loc: SrcLoc) Ast.TokenIndex {
        const tree = src_loc.handle.tree;
        return tree.firstToken(src_loc.parent_decl_node);
    }

    pub fn declRelativeToNodeIndex(src_loc: SrcLoc, offset: i32) Ast.TokenIndex {
        return @bitCast(Ast.Node.Index, offset + @bitCast(i32, src_loc.parent_decl_node));
    }

    pub const Span = struct {
        start: u32,
        end: u32,
        main: u32,
    };

pub fn span(src_loc: SrcLoc) Span {
|
||||||
|
switch (src_loc.lazy) {
|
||||||
|
.unneeded => unreachable,
|
||||||
|
.entire_file => return Span{ .start = 0, .end = 1, .main = 0 },
|
||||||
|
|
||||||
|
.byte_abs => |byte_index| return Span{ .start = byte_index, .end = byte_index + 1, .main = byte_index },
|
||||||
|
|
||||||
|
.token_abs => |tok_index| {
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
const start = tree.tokens.items(.start)[tok_index];
|
||||||
|
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
|
||||||
|
return Span{ .start = start, .end = end, .main = start };
|
||||||
|
},
|
||||||
|
.node_abs => |node| {
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
return nodeToSpan(tree, node);
|
||||||
|
},
|
||||||
|
.byte_offset => |byte_off| {
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
const tok_index = src_loc.declSrcToken();
|
||||||
|
const start = tree.tokens.items(.start)[tok_index] + byte_off;
|
||||||
|
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
|
||||||
|
return Span{ .start = start, .end = end, .main = start };
|
||||||
|
},
|
||||||
|
.token_offset => |tok_off| {
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
const tok_index = src_loc.declSrcToken() + tok_off;
|
||||||
|
const start = tree.tokens.items(.start)[tok_index];
|
||||||
|
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
|
||||||
|
return Span{ .start = start, .end = end, .main = start };
|
||||||
|
},
|
||||||
|
.node_offset => |traced_off| {
|
||||||
|
const node_off = traced_off.x;
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
const node = src_loc.declRelativeToNodeIndex(node_off);
|
||||||
|
assert(src_loc.handle.tree_loaded);
|
||||||
|
return nodeToSpan(tree, node);
|
||||||
|
},
|
||||||
|
.node_offset_main_token => |node_off| {
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
const node = src_loc.declRelativeToNodeIndex(node_off);
|
||||||
|
const main_token = tree.nodes.items(.main_token)[node];
|
||||||
|
return tokensToSpan(tree, main_token, main_token, main_token);
|
||||||
|
},
|
||||||
|
.node_offset_bin_op => |node_off| {
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
const node = src_loc.declRelativeToNodeIndex(node_off);
|
||||||
|
assert(src_loc.handle.tree_loaded);
|
||||||
|
return nodeToSpan(tree, node);
|
||||||
|
},
|
||||||
|
.node_offset_initializer => |node_off| {
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
const node = src_loc.declRelativeToNodeIndex(node_off);
|
||||||
|
return tokensToSpan(
|
||||||
|
tree,
|
||||||
|
tree.firstToken(node) - 3,
|
||||||
|
tree.lastToken(node),
|
||||||
|
tree.nodes.items(.main_token)[node] - 2,
|
||||||
|
);
|
||||||
|
},
|
||||||
|
.node_offset_var_decl_ty => |node_off| {
|
||||||
|
const tree = src_loc.handle.tree;
|
||||||
|
const node = src_loc.declRelativeToNodeIndex(node_off);
|
||||||
|
const node_tags = tree.nodes.items(.tag);
|
||||||
|
const full = switch (node_tags[node]) {
|
||||||
|
.global_var_decl,
|
||||||
|
.local_var_decl,
|
||||||
|
.simple_var_decl,
|
||||||
|
.aligned_var_decl,
|
||||||
|
                => tree.fullVarDecl(node).?,
                .@"usingnamespace" => {
                    const node_data = tree.nodes.items(.data);
                    return nodeToSpan(tree, node_data[node].lhs);
                },
                else => unreachable,
            };
            if (full.ast.type_node != 0) {
                return nodeToSpan(tree, full.ast.type_node);
            }
            const tok_index = full.ast.mut_token + 1; // the name token
            const start = tree.tokens.items(.start)[tok_index];
            const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
            return Span{ .start = start, .end = end, .main = start };
        },
        .node_offset_var_decl_align => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const full = tree.fullVarDecl(node).?;
            return nodeToSpan(tree, full.ast.align_node);
        },
        .node_offset_var_decl_section => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const full = tree.fullVarDecl(node).?;
            return nodeToSpan(tree, full.ast.section_node);
        },
        .node_offset_var_decl_addrspace => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const full = tree.fullVarDecl(node).?;
            return nodeToSpan(tree, full.ast.addrspace_node);
        },
        .node_offset_var_decl_init => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const full = tree.fullVarDecl(node).?;
            return nodeToSpan(tree, full.ast.init_node);
        },
        .node_offset_builtin_call_arg0 => |n| return src_loc.byteOffsetBuiltinCallArg(n, 0),
        .node_offset_builtin_call_arg1 => |n| return src_loc.byteOffsetBuiltinCallArg(n, 1),
        .node_offset_builtin_call_arg2 => |n| return src_loc.byteOffsetBuiltinCallArg(n, 2),
        .node_offset_builtin_call_arg3 => |n| return src_loc.byteOffsetBuiltinCallArg(n, 3),
        .node_offset_builtin_call_arg4 => |n| return src_loc.byteOffsetBuiltinCallArg(n, 4),
        .node_offset_builtin_call_arg5 => |n| return src_loc.byteOffsetBuiltinCallArg(n, 5),
        .node_offset_array_access_index => |node_off| {
            const tree = src_loc.handle.tree;
            const node_datas = tree.nodes.items(.data);
            const node = src_loc.declRelativeToNodeIndex(node_off);
            return nodeToSpan(tree, node_datas[node].rhs);
        },
        .node_offset_slice_ptr,
        .node_offset_slice_start,
        .node_offset_slice_end,
        .node_offset_slice_sentinel,
        => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const full = tree.fullSlice(node).?;
            const part_node = switch (src_loc.lazy) {
                .node_offset_slice_ptr => full.ast.sliced,
                .node_offset_slice_start => full.ast.start,
                .node_offset_slice_end => full.ast.end,
                .node_offset_slice_sentinel => full.ast.sentinel,
                else => unreachable,
            };
            return nodeToSpan(tree, part_node);
        },
        .node_offset_call_func => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            var buf: [1]Ast.Node.Index = undefined;
            const full = tree.fullCall(&buf, node).?;
            return nodeToSpan(tree, full.ast.fn_expr);
        },
        .node_offset_field_name => |node_off| {
            const tree = src_loc.handle.tree;
            const node_datas = tree.nodes.items(.data);
            const node_tags = tree.nodes.items(.tag);
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const tok_index = switch (node_tags[node]) {
                .field_access => node_datas[node].rhs,
                else => tree.firstToken(node) - 2,
            };
            const start = tree.tokens.items(.start)[tok_index];
            const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
            return Span{ .start = start, .end = end, .main = start };
        },
        .node_offset_deref_ptr => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            return nodeToSpan(tree, node);
        },
        .node_offset_asm_source => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const full = tree.fullAsm(node).?;
            return nodeToSpan(tree, full.ast.template);
        },
        .node_offset_asm_ret_ty => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const full = tree.fullAsm(node).?;
            const asm_output = full.outputs[0];
            const node_datas = tree.nodes.items(.data);
            return nodeToSpan(tree, node_datas[asm_output].lhs);
        },

        .node_offset_for_cond, .node_offset_if_cond => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const node_tags = tree.nodes.items(.tag);
            const src_node = switch (node_tags[node]) {
                .if_simple,
                .@"if",
                => tree.fullIf(node).?.ast.cond_expr,

                .while_simple,
                .while_cont,
                .@"while",
                .for_simple,
                .@"for",
                => tree.fullWhile(node).?.ast.cond_expr,

                .@"orelse" => node,
                .@"catch" => node,
                else => unreachable,
            };
            return nodeToSpan(tree, src_node);
        },
        .node_offset_bin_lhs => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const node_datas = tree.nodes.items(.data);
            return nodeToSpan(tree, node_datas[node].lhs);
        },
        .node_offset_bin_rhs => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const node_datas = tree.nodes.items(.data);
            return nodeToSpan(tree, node_datas[node].rhs);
        },

        .node_offset_switch_operand => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            const node_datas = tree.nodes.items(.data);
            return nodeToSpan(tree, node_datas[node].lhs);
        },

        .node_offset_switch_special_prong => |node_off| {
            const tree = src_loc.handle.tree;
            const switch_node = src_loc.declRelativeToNodeIndex(node_off);
            const node_datas = tree.nodes.items(.data);
            const node_tags = tree.nodes.items(.tag);
            const main_tokens = tree.nodes.items(.main_token);
            const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
            const case_nodes = tree.extra_data[extra.start..extra.end];
            for (case_nodes) |case_node| {
                const case = tree.fullSwitchCase(case_node).?;
                const is_special = (case.ast.values.len == 0) or
                    (case.ast.values.len == 1 and
                    node_tags[case.ast.values[0]] == .identifier and
                    std.mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
                if (!is_special) continue;

                return nodeToSpan(tree, case_node);
            } else unreachable;
        },

        .node_offset_switch_range => |node_off| {
            const tree = src_loc.handle.tree;
            const switch_node = src_loc.declRelativeToNodeIndex(node_off);
            const node_datas = tree.nodes.items(.data);
            const node_tags = tree.nodes.items(.tag);
            const main_tokens = tree.nodes.items(.main_token);
            const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
            const case_nodes = tree.extra_data[extra.start..extra.end];
            for (case_nodes) |case_node| {
                const case = tree.fullSwitchCase(case_node).?;
                const is_special = (case.ast.values.len == 0) or
                    (case.ast.values.len == 1 and
                    node_tags[case.ast.values[0]] == .identifier and
                    std.mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
                if (is_special) continue;

                for (case.ast.values) |item_node| {
                    if (node_tags[item_node] == .switch_range) {
                        return nodeToSpan(tree, item_node);
                    }
                }
            } else unreachable;
        },
        .node_offset_switch_prong_capture => |node_off| {
            const tree = src_loc.handle.tree;
            const case_node = src_loc.declRelativeToNodeIndex(node_off);
            const case = tree.fullSwitchCase(case_node).?;
            const start_tok = case.payload_token.?;
            const token_tags = tree.tokens.items(.tag);
            const end_tok = switch (token_tags[start_tok]) {
                .asterisk => start_tok + 1,
                else => start_tok,
            };
            const start = tree.tokens.items(.start)[start_tok];
            const end_start = tree.tokens.items(.start)[end_tok];
            const end = end_start + @intCast(u32, tree.tokenSlice(end_tok).len);
            return Span{ .start = start, .end = end, .main = start };
        },
        .node_offset_fn_type_align => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            var buf: [1]Ast.Node.Index = undefined;
            const full = tree.fullFnProto(&buf, node).?;
            return nodeToSpan(tree, full.ast.align_expr);
        },
        .node_offset_fn_type_addrspace => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            var buf: [1]Ast.Node.Index = undefined;
            const full = tree.fullFnProto(&buf, node).?;
            return nodeToSpan(tree, full.ast.addrspace_expr);
        },
        .node_offset_fn_type_section => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            var buf: [1]Ast.Node.Index = undefined;
            const full = tree.fullFnProto(&buf, node).?;
            return nodeToSpan(tree, full.ast.section_expr);
        },
        .node_offset_fn_type_cc => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            var buf: [1]Ast.Node.Index = undefined;
            const full = tree.fullFnProto(&buf, node).?;
            return nodeToSpan(tree, full.ast.callconv_expr);
        },

        .node_offset_fn_type_ret_ty => |node_off| {
            const tree = src_loc.handle.tree;
            const node = src_loc.declRelativeToNodeIndex(node_off);
            var buf: [1]Ast.Node.Index = undefined;
            const full = tree.fullFnProto(&buf, node).?;
            return nodeToSpan(tree, full.ast.return_type);
        },
        .node_offset_param => |node_off| {
            const tree = src_loc.handle.tree;
            const token_tags = tree.tokens.items(.tag);
            const node = src_loc.declRelativeToNodeIndex(node_off);

            var first_tok = tree.firstToken(node);
            while (true) switch (token_tags[first_tok - 1]) {
                .colon, .identifier, .keyword_comptime, .keyword_noalias => first_tok -= 1,
                else => break,
            };
            return tokensToSpan(
                tree,
                first_tok,
                tree.lastToken(node),
                first_tok,
            );
        },
        .token_offset_param => |token_off| {
            const tree = src_loc.handle.tree;
            const token_tags = tree.tokens.items(.tag);
            const main_token = tree.nodes.items(.main_token)[src_loc.parent_decl_node];
            const tok_index = @bitCast(Ast.TokenIndex, token_off + @bitCast(i32, main_token));

            var first_tok = tok_index;
            while (true) switch (token_tags[first_tok - 1]) {
                .colon, .identifier, .keyword_comptime, .keyword_noalias => first_tok -= 1,
                else => break,
            };
            return tokensToSpan(
                tree,
                first_tok,
                tok_index,
                first_tok,
            );
        },

        .node_offset_anyframe_type => |node_off| {
            const tree = src_loc.handle.tree;
            const node_datas = tree.nodes.items(.data);
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);
            return nodeToSpan(tree, node_datas[parent_node].rhs);
        },

        .node_offset_lib_name => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);
            var buf: [1]Ast.Node.Index = undefined;
            const full = tree.fullFnProto(&buf, parent_node).?;
            const tok_index = full.lib_name.?;
            const start = tree.tokens.items(.start)[tok_index];
            const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
            return Span{ .start = start, .end = end, .main = start };
        },

        .node_offset_array_type_len => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullArrayType(parent_node).?;
            return nodeToSpan(tree, full.ast.elem_count);
        },
        .node_offset_array_type_sentinel => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullArrayType(parent_node).?;
            return nodeToSpan(tree, full.ast.sentinel);
        },
        .node_offset_array_type_elem => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullArrayType(parent_node).?;
            return nodeToSpan(tree, full.ast.elem_type);
        },
        .node_offset_un_op => |node_off| {
            const tree = src_loc.handle.tree;
            const node_datas = tree.nodes.items(.data);
            const node = src_loc.declRelativeToNodeIndex(node_off);

            return nodeToSpan(tree, node_datas[node].lhs);
        },
        .node_offset_ptr_elem => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullPtrType(parent_node).?;
            return nodeToSpan(tree, full.ast.child_type);
        },
        .node_offset_ptr_sentinel => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullPtrType(parent_node).?;
            return nodeToSpan(tree, full.ast.sentinel);
        },
        .node_offset_ptr_align => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullPtrType(parent_node).?;
            return nodeToSpan(tree, full.ast.align_node);
        },
        .node_offset_ptr_addrspace => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullPtrType(parent_node).?;
            return nodeToSpan(tree, full.ast.addrspace_node);
        },
        .node_offset_ptr_bitoffset => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullPtrType(parent_node).?;
            return nodeToSpan(tree, full.ast.bit_range_start);
        },
        .node_offset_ptr_hostsize => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full = tree.fullPtrType(parent_node).?;
            return nodeToSpan(tree, full.ast.bit_range_end);
        },
        .node_offset_container_tag => |node_off| {
            const tree = src_loc.handle.tree;
            const node_tags = tree.nodes.items(.tag);
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            switch (node_tags[parent_node]) {
                .container_decl_arg, .container_decl_arg_trailing => {
                    const full = tree.containerDeclArg(parent_node);
                    return nodeToSpan(tree, full.ast.arg);
                },
                .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => {
                    const full = tree.taggedUnionEnumTag(parent_node);

                    return tokensToSpan(
                        tree,
                        tree.firstToken(full.ast.arg) - 2,
                        tree.lastToken(full.ast.arg) + 1,
                        tree.nodes.items(.main_token)[full.ast.arg],
                    );
                },
                else => unreachable,
            }
        },
        .node_offset_field_default => |node_off| {
            const tree = src_loc.handle.tree;
            const node_tags = tree.nodes.items(.tag);
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            const full: Ast.full.ContainerField = switch (node_tags[parent_node]) {
                .container_field => tree.containerField(parent_node),
                .container_field_init => tree.containerFieldInit(parent_node),
                else => unreachable,
            };
            return nodeToSpan(tree, full.ast.value_expr);
        },
        .node_offset_init_ty => |node_off| {
            const tree = src_loc.handle.tree;
            const parent_node = src_loc.declRelativeToNodeIndex(node_off);

            var buf: [2]Ast.Node.Index = undefined;
            const full = tree.fullArrayInit(&buf, parent_node).?;
            return nodeToSpan(tree, full.ast.type_expr);
        },
        .node_offset_store_ptr => |node_off| {
            const tree = src_loc.handle.tree;
            const node_tags = tree.nodes.items(.tag);
            const node_datas = tree.nodes.items(.data);
            const node = src_loc.declRelativeToNodeIndex(node_off);

            switch (node_tags[node]) {
                .assign => {
                    return nodeToSpan(tree, node_datas[node].lhs);
                },
                else => return nodeToSpan(tree, node),
            }
        },
        .node_offset_store_operand => |node_off| {
            const tree = src_loc.handle.tree;
            const node_tags = tree.nodes.items(.tag);
            const node_datas = tree.nodes.items(.data);
            const node = src_loc.declRelativeToNodeIndex(node_off);

            switch (node_tags[node]) {
                .assign => {
                    return nodeToSpan(tree, node_datas[node].rhs);
                },
                else => return nodeToSpan(tree, node),
            }
        },
        }
    }

    pub fn byteOffsetBuiltinCallArg(
        src_loc: SrcLoc,
        node_off: i32,
        arg_index: u32,
    ) !Span {
        const tree = src_loc.handle.tree;
        const node_datas = tree.nodes.items(.data);
        const node_tags = tree.nodes.items(.tag);
        const node = src_loc.declRelativeToNodeIndex(node_off);
        const param = switch (node_tags[node]) {
            .builtin_call_two, .builtin_call_two_comma => switch (arg_index) {
                0 => node_datas[node].lhs,
                1 => node_datas[node].rhs,
                else => unreachable,
            },
            .builtin_call, .builtin_call_comma => tree.extra_data[node_datas[node].lhs + arg_index],
            else => unreachable,
        };
        return nodeToSpan(tree, param);
    }
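
    // For reference: builtins with at most two arguments are encoded as
    // `.builtin_call_two`/`.builtin_call_two_comma`, with the argument nodes
    // stored directly in `lhs`/`rhs`; longer argument lists use
    // `.builtin_call`/`.builtin_call_comma`, whose arguments live in
    // `tree.extra_data` starting at `lhs`. The switch above mirrors exactly
    // this encoding.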

    pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
        return tokensToSpan(
            tree,
            tree.firstToken(node),
            tree.lastToken(node),
            tree.nodes.items(.main_token)[node],
        );
    }

    fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
        const token_starts = tree.tokens.items(.start);
        var start_tok = start;
        var end_tok = end;

        if (tree.tokensOnSameLine(start, end)) {
            // do nothing
        } else if (tree.tokensOnSameLine(start, main)) {
            end_tok = main;
        } else if (tree.tokensOnSameLine(main, end)) {
            start_tok = main;
        } else {
            start_tok = main;
            end_tok = main;
        }
        const start_off = token_starts[start_tok];
        const end_off = token_starts[end_tok] + @intCast(u32, tree.tokenSlice(end_tok).len);
        return Span{ .start = start_off, .end = end_off, .main = token_starts[main] };
    }
};

/// Resolving a source location into a byte offset may require doing work
/// that we would rather not do unless the error actually occurs.
/// Therefore we need a data structure that contains the information necessary
/// to lazily produce a `SrcLoc` as required.
/// Most of the offsets in this data structure are relative to the containing Decl.
/// This makes the source location resolve properly even when a Decl gets
/// shifted up or down in the file, as long as the Decl's contents themselves
/// do not change.
pub const LazySrcLoc = union(enum) {
    /// When this tag is set, the code that constructed this `LazySrcLoc` is asserting
    /// that all code paths which would need to resolve the source location are
    /// unreachable. If you are debugging a case where this tag is incorrectly set,
    /// consider using reverse-continue with a memory watchpoint to see where the
    /// value is being set to this tag.
    unneeded,
    /// Means the source location points to an entire file; not any particular
    /// location within the file. `handle` union field will be active.
    entire_file,
    /// The source location points to a byte offset within a source file,
    /// offset from 0. The source file is determined contextually.
    /// Inside a `SrcLoc`, the `handle` union field will be active.
    byte_abs: u32,
    /// The source location points to a token within a source file,
    /// offset from 0. The source file is determined contextually.
    /// Inside a `SrcLoc`, the `handle` union field will be active.
    token_abs: u32,
    /// The source location points to an AST node within a source file,
    /// offset from 0. The source file is determined contextually.
    /// Inside a `SrcLoc`, the `handle` union field will be active.
    node_abs: u32,
    /// The source location points to a byte offset within a source file,
    /// offset from the byte offset of the Decl within the file.
    /// The Decl is determined contextually.
    byte_offset: u32,
    /// This data is the offset into the token list from the Decl token.
    /// The Decl is determined contextually.
    token_offset: u32,
    /// The source location points to an AST node, which is this value offset
    /// from its containing Decl node AST index.
    /// The Decl is determined contextually.
    node_offset: i32,
    /// The source location points to the main token of an AST node, found
    /// by taking this AST node index offset from the containing Decl AST node.
    /// The Decl is determined contextually.
    node_offset_main_token: i32,
    /// The source location points to the beginning of a struct initializer.
    /// The Decl is determined contextually.
    node_offset_initializer: i32,
    /// The source location points to a variable declaration type expression,
    /// found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a variable declaration AST node. Next, navigate
    /// to the type expression.
    /// The Decl is determined contextually.
    node_offset_var_decl_ty: i32,
    /// The source location points to the alignment expression of a var decl.
    /// The Decl is determined contextually.
    node_offset_var_decl_align: i32,
    /// The source location points to the linksection expression of a var decl.
    /// The Decl is determined contextually.
    node_offset_var_decl_section: i32,
    /// The source location points to the addrspace expression of a var decl.
    /// The Decl is determined contextually.
    node_offset_var_decl_addrspace: i32,
    /// The source location points to the initializer of a var decl.
    /// The Decl is determined contextually.
    node_offset_var_decl_init: i32,
    /// The source location points to a for loop condition expression,
    /// found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a for loop AST node. Next, navigate
    /// to the condition expression.
    /// The Decl is determined contextually.
    node_offset_for_cond: i32,
    /// The source location points to the first parameter of a builtin
    /// function call, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a builtin call AST node. Next, navigate
    /// to the first parameter.
    /// The Decl is determined contextually.
    node_offset_builtin_call_arg0: i32,
    /// Same as `node_offset_builtin_call_arg0` except arg index 1.
    node_offset_builtin_call_arg1: i32,
    node_offset_builtin_call_arg2: i32,
    node_offset_builtin_call_arg3: i32,
    node_offset_builtin_call_arg4: i32,
    node_offset_builtin_call_arg5: i32,
    /// The source location points to the index expression of an array access
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to an array access AST node. Next, navigate
    /// to the index expression.
    /// The Decl is determined contextually.
    node_offset_array_access_index: i32,
    /// The source location points to the LHS of a slice expression,
    /// found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a slice AST node. Next, navigate
    /// to the sliced expression.
    /// The Decl is determined contextually.
    node_offset_slice_ptr: i32,
    /// The source location points to the start expression of a slice
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a slice AST node. Next, navigate
    /// to the start expression.
    /// The Decl is determined contextually.
    node_offset_slice_start: i32,
    /// The source location points to the end expression of a slice
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a slice AST node. Next, navigate
    /// to the end expression.
    /// The Decl is determined contextually.
    node_offset_slice_end: i32,
    /// The source location points to the sentinel expression of a slice
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a slice AST node. Next, navigate
    /// to the sentinel expression.
    /// The Decl is determined contextually.
    node_offset_slice_sentinel: i32,
    /// The source location points to the callee expression of a function
    /// call expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a function call AST node. Next, navigate
    /// to the callee expression.
    /// The Decl is determined contextually.
    node_offset_call_func: i32,
    /// The payload is offset from the containing Decl AST node.
    /// The source location points to the field name of:
    ///  * a field access expression (`a.b`), or
    ///  * the operand ("b" node) of a field initialization expression (`.a = b`)
    /// The Decl is determined contextually.
    node_offset_field_name: i32,
    /// The source location points to the pointer of a pointer deref expression,
    /// found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a pointer deref AST node. Next, navigate
    /// to the pointer expression.
    /// The Decl is determined contextually.
    node_offset_deref_ptr: i32,
    /// The source location points to the assembly source code of an inline assembly
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to an inline assembly AST node. Next, navigate
    /// to the asm template source code.
    /// The Decl is determined contextually.
    node_offset_asm_source: i32,
    /// The source location points to the return type of an inline assembly
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to an inline assembly AST node. Next, navigate
    /// to the return type expression.
    /// The Decl is determined contextually.
    node_offset_asm_ret_ty: i32,
    /// The source location points to the condition expression of an if
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to an if expression AST node. Next, navigate
    /// to the condition expression.
    /// The Decl is determined contextually.
    node_offset_if_cond: i32,
    /// The source location points to a binary expression, such as `a + b`, found
    /// by taking this AST node index offset from the containing Decl AST node.
    /// The Decl is determined contextually.
    node_offset_bin_op: i32,
    /// The source location points to the LHS of a binary expression, found
    /// by taking this AST node index offset from the containing Decl AST node,
    /// which points to a binary expression AST node. Next, navigate to the LHS.
    /// The Decl is determined contextually.
    node_offset_bin_lhs: i32,
    /// The source location points to the RHS of a binary expression, found
    /// by taking this AST node index offset from the containing Decl AST node,
    /// which points to a binary expression AST node. Next, navigate to the RHS.
    /// The Decl is determined contextually.
    node_offset_bin_rhs: i32,
    /// The source location points to the operand of a switch expression, found
    /// by taking this AST node index offset from the containing Decl AST node,
    /// which points to a switch expression AST node. Next, navigate to the operand.
    /// The Decl is determined contextually.
    node_offset_switch_operand: i32,
    /// The source location points to the else/`_` prong of a switch expression, found
    /// by taking this AST node index offset from the containing Decl AST node,
    /// which points to a switch expression AST node. Next, navigate to the else/`_` prong.
    /// The Decl is determined contextually.
    node_offset_switch_special_prong: i32,
    /// The source location points to all the ranges of a switch expression, found
    /// by taking this AST node index offset from the containing Decl AST node,
    /// which points to a switch expression AST node. Next, navigate to any of the
    /// range nodes. The error applies to all of them.
    /// The Decl is determined contextually.
    node_offset_switch_range: i32,
    /// The source location points to the capture of a switch_prong.
    /// The Decl is determined contextually.
    node_offset_switch_prong_capture: i32,
    /// The source location points to the align expr of a function type
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a function type AST node. Next, navigate to
    /// the align expression.
    /// The Decl is determined contextually.
    node_offset_fn_type_align: i32,
    /// The source location points to the addrspace expr of a function type
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a function type AST node. Next, navigate to
    /// the addrspace expression.
    /// The Decl is determined contextually.
    node_offset_fn_type_addrspace: i32,
    /// The source location points to the linksection expr of a function type
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a function type AST node. Next, navigate to
    /// the linksection expression.
    /// The Decl is determined contextually.
    node_offset_fn_type_section: i32,
    /// The source location points to the calling convention of a function type
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a function type AST node. Next, navigate to
    /// the calling convention node.
    /// The Decl is determined contextually.
    node_offset_fn_type_cc: i32,
    /// The source location points to the return type of a function type
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to a function type AST node. Next, navigate to
    /// the return type node.
    /// The Decl is determined contextually.
    node_offset_fn_type_ret_ty: i32,
    /// The source location points to a function parameter, found by taking
    /// this AST node index offset from the containing Decl AST node.
    /// The Decl is determined contextually.
    node_offset_param: i32,
    /// The source location points to a function parameter, found by taking
    /// this token index offset from the containing Decl's main token.
    /// The Decl is determined contextually.
    token_offset_param: i32,
    /// The source location points to the type expression of an `anyframe->T`
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to an `anyframe->T` expression AST node. Next, navigate
    /// to the type expression.
    /// The Decl is determined contextually.
    node_offset_anyframe_type: i32,
    /// The source location points to the string literal of `extern "foo"`, found
    /// by taking this AST node index offset from the containing
    /// Decl AST node, which points to a function prototype or variable declaration
    /// expression AST node. Next, navigate to the string literal of the `extern "foo"`.
    /// The Decl is determined contextually.
    node_offset_lib_name: i32,
    /// The source location points to the len expression of an `[N:S]T`
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to an `[N:S]T` expression AST node. Next, navigate
    /// to the len expression.
    /// The Decl is determined contextually.
    node_offset_array_type_len: i32,
    /// The source location points to the sentinel expression of an `[N:S]T`
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to an `[N:S]T` expression AST node. Next, navigate
    /// to the sentinel expression.
    /// The Decl is determined contextually.
    node_offset_array_type_sentinel: i32,
    /// The source location points to the elem expression of an `[N:S]T`
    /// expression, found by taking this AST node index offset from the containing
    /// Decl AST node, which points to an `[N:S]T` expression AST node. Next, navigate
    /// to the elem expression.
    /// The Decl is determined contextually.
    node_offset_array_type_elem: i32,
    /// The source location points to the operand of a unary expression.
    /// The Decl is determined contextually.
    node_offset_un_op: i32,
    /// The source location points to the elem type of a pointer.
    /// The Decl is determined contextually.
    node_offset_ptr_elem: i32,
    /// The source location points to the sentinel of a pointer.
    /// The Decl is determined contextually.
    node_offset_ptr_sentinel: i32,
    /// The source location points to the align expr of a pointer.
    /// The Decl is determined contextually.
    node_offset_ptr_align: i32,
    /// The source location points to the addrspace expr of a pointer.
    /// The Decl is determined contextually.
    node_offset_ptr_addrspace: i32,
    /// The source location points to the bit-offset of a pointer.
    /// The Decl is determined contextually.
    node_offset_ptr_bitoffset: i32,
    /// The source location points to the host size of a pointer.
    /// The Decl is determined contextually.
    node_offset_ptr_hostsize: i32,
    /// The source location points to the tag type of a union or an enum.
    /// The Decl is determined contextually.
    node_offset_container_tag: i32,
    /// The source location points to the default value of a field.
    /// The Decl is determined contextually.
    node_offset_field_default: i32,
    /// The source location points to the type of an array or struct initializer.
    /// The Decl is determined contextually.
    node_offset_init_ty: i32,
    /// The source location points to the LHS of an assignment.
    /// The Decl is determined contextually.
    node_offset_store_ptr: i32,
    /// The source location points to the RHS of an assignment.
    /// The Decl is determined contextually.
    node_offset_store_operand: i32,

    pub fn nodeOffset(node_offset: i32) LazySrcLoc {
        return .{ .node_offset = node_offset };
    }

    pub fn toSrcLoc(lazy: LazySrcLoc, handle: *Handle, src_node: Ast.Node.Index) SrcLoc {
        return switch (lazy) {
            .unneeded,
            .entire_file,
            .byte_abs,
            .token_abs,
            .node_abs,
            => .{
                .handle = handle,
                .parent_decl_node = 0,
                .lazy = lazy,
            },

            .byte_offset,
            .token_offset,
            .node_offset,
            .node_offset_main_token,
            .node_offset_initializer,
            .node_offset_var_decl_ty,
            .node_offset_var_decl_align,
            .node_offset_var_decl_section,
            .node_offset_var_decl_addrspace,
            .node_offset_var_decl_init,
            .node_offset_for_cond,
            .node_offset_builtin_call_arg0,
            .node_offset_builtin_call_arg1,
            .node_offset_builtin_call_arg2,
            .node_offset_builtin_call_arg3,
            .node_offset_builtin_call_arg4,
            .node_offset_builtin_call_arg5,
            .node_offset_array_access_index,
            .node_offset_slice_ptr,
            .node_offset_slice_start,
            .node_offset_slice_end,
            .node_offset_slice_sentinel,
            .node_offset_call_func,
            .node_offset_field_name,
            .node_offset_deref_ptr,
            .node_offset_asm_source,
            .node_offset_asm_ret_ty,
            .node_offset_if_cond,
            .node_offset_bin_op,
            .node_offset_bin_lhs,
            .node_offset_bin_rhs,
            .node_offset_switch_operand,
            .node_offset_switch_special_prong,
            .node_offset_switch_range,
            .node_offset_switch_prong_capture,
            .node_offset_fn_type_align,
            .node_offset_fn_type_addrspace,
            .node_offset_fn_type_section,
            .node_offset_fn_type_cc,
            .node_offset_fn_type_ret_ty,
            .node_offset_param,
            .token_offset_param,
            .node_offset_anyframe_type,
            .node_offset_lib_name,
            .node_offset_array_type_len,
            .node_offset_array_type_sentinel,
            .node_offset_array_type_elem,
            .node_offset_un_op,
            .node_offset_ptr_elem,
            .node_offset_ptr_sentinel,
            .node_offset_ptr_align,
            .node_offset_ptr_addrspace,
            .node_offset_ptr_bitoffset,
            .node_offset_ptr_hostsize,
            .node_offset_container_tag,
            .node_offset_field_default,
            .node_offset_init_ty,
            .node_offset_store_ptr,
            .node_offset_store_operand,
            => .{
                .handle = handle,
                .parent_decl_node = src_node,
                .lazy = lazy,
            },
        };
    }
};
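
To make the lazy-offset scheme above concrete, here is a minimal usage sketch (not part of this commit); it assumes `handle` is an open document's `*Handle` and `decl_node` is the Ast index of the containing declaration:

```zig
// Point at the init expression of a var decl, offset 0 from its Decl node.
const lazy: LazySrcLoc = .{ .node_offset_var_decl_init = 0 };

// `toSrcLoc` is cheap: it only records the handle, the Decl node, and the
// lazy tag. The byte-offset work in `SrcLoc` is deferred until the
// diagnostic is actually emitted.
const src_loc = lazy.toSrcLoc(handle, decl_node);
```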
src/stage2/Zir.zig (new file, 3856 lines)
File diff suppressed because it is too large