Merge branch 'zigtools:master' into cimport-support

This commit is contained in:
Techatrix 2022-08-21 08:53:06 +00:00 committed by GitHub
commit c6892eaafc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 46 additions and 14 deletions

View File

@ -7,6 +7,7 @@ pub fn build(b: *std.build.Builder) !void {
const mode = b.standardReleaseOptions(); const mode = b.standardReleaseOptions();
const exe = b.addExecutable("zls", "src/main.zig"); const exe = b.addExecutable("zls", "src/main.zig");
exe.use_stage1 = true;
const exe_options = b.addOptions(); const exe_options = b.addOptions();
exe.addOptions("build_options", exe_options); exe.addOptions("build_options", exe_options);
@ -76,11 +77,13 @@ pub fn build(b: *std.build.Builder) !void {
test_step.dependOn(b.getInstallStep()); test_step.dependOn(b.getInstallStep());
var unit_tests = b.addTest("src/unit_tests.zig"); var unit_tests = b.addTest("src/unit_tests.zig");
unit_tests.use_stage1 = true;
unit_tests.setBuildMode(.Debug); unit_tests.setBuildMode(.Debug);
unit_tests.setTarget(target); unit_tests.setTarget(target);
test_step.dependOn(&unit_tests.step); test_step.dependOn(&unit_tests.step);
var session_tests = b.addTest("tests/sessions.zig"); var session_tests = b.addTest("tests/sessions.zig");
session_tests.use_stage1 = true;
session_tests.addPackage(.{ .name = "header", .source = .{ .path = "src/header.zig" } }); session_tests.addPackage(.{ .name = "header", .source = .{ .path = "src/header.zig" } });
session_tests.addPackage(.{ .name = "server", .source = .{ .path = "src/Server.zig" }, .dependencies = exe.packages.items }); session_tests.addPackage(.{ .name = "server", .source = .{ .path = "src/Server.zig" }, .dependencies = exe.packages.items });
session_tests.setBuildMode(.Debug); session_tests.setBuildMode(.Debug);

View File

@ -239,11 +239,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
scopes: for (handle.document_scope.scopes) |scope| { scopes: for (handle.document_scope.scopes) |scope| {
const scope_data = switch (scope.data) { const scope_data = switch (scope.data) {
.function => |f| b: { .function => |f| b: {
var buf: [1]std.zig.Ast.Node.Index = undefined; if (!ast.fnProtoHasBody(tree, f).?) continue :scopes;
var proto = ast.fnProto(tree, f, &buf) orelse break :b f;
if (proto.extern_export_inline_token) |tok| {
if (std.mem.eql(u8, tree.tokenSlice(tok), "extern")) continue :scopes;
}
break :b f; break :b f;
}, },
.block => |b| b, .block => |b| b,
@ -254,7 +250,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
while (decl_iterator.next()) |decl| { while (decl_iterator.next()) |decl| {
var identifier_count: usize = 0; var identifier_count: usize = 0;
var name_token_index = switch (decl.value_ptr.*) { const name_token_index = switch (decl.value_ptr.*) {
.ast_node => |an| s: { .ast_node => |an| s: {
const an_tag = tree.nodes.items(.tag)[an]; const an_tag = tree.nodes.items(.tag)[an];
switch (an_tag) { switch (an_tag) {
@ -274,8 +270,27 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
const pit_start = tree.firstToken(scope_data); const pit_start = tree.firstToken(scope_data);
const pit_end = ast.lastToken(tree, scope_data); const pit_end = ast.lastToken(tree, scope_data);
for (tree.tokens.items(.tag)[pit_start..pit_end]) |tag, index| { const tags = tree.tokens.items(.tag)[pit_start..pit_end];
if (tag == .identifier and std.mem.eql(u8, tree.tokenSlice(pit_start + @intCast(u32, index)), tree.tokenSlice(name_token_index))) identifier_count += 1; for (tags) |tag, index| {
if (tag != .identifier) continue;
if (!std.mem.eql(u8, tree.tokenSlice(pit_start + @intCast(u32, index)), tree.tokenSlice(name_token_index))) continue;
if (index -| 1 > 0 and tags[index - 1] == .period) continue;
if (index +| 2 < tags.len and tags[index + 1] == .colon) switch (tags[index + 2]) {
.l_brace,
.keyword_inline,
.keyword_while,
.keyword_for,
.keyword_switch,
=> continue,
else => {},
};
if (index -| 2 > 0 and tags[index - 1] == .colon) switch (tags[index - 2]) {
.keyword_break,
.keyword_continue,
=> continue,
else => {},
};
identifier_count += 1;
} }
if (identifier_count <= 1) if (identifier_count <= 1)
@ -2450,7 +2465,6 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
return; return;
} }
std.debug.assert(tree.root.Object.get("method") != null);
const method = tree.root.Object.get("method").?.String; const method = tree.root.Object.get("method").?.String;
const start_time = std.time.milliTimestamp(); const start_time = std.time.milliTimestamp();
@ -2490,6 +2504,7 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
}; };
// Hack to avoid `return`ing in the inline for, which causes bugs. // Hack to avoid `return`ing in the inline for, which causes bugs.
// TODO: Change once stage2 is shipped and more stable?
var done: ?anyerror = null; var done: ?anyerror = null;
inline for (method_map) |method_info| { inline for (method_map) |method_info| {
if (done == null and std.mem.eql(u8, method, method_info[0])) { if (done == null and std.mem.eql(u8, method, method_info[0])) {

View File

@ -222,6 +222,9 @@ pub fn getVariableSignature(tree: Ast, var_decl: Ast.full.VarDecl) []const u8 {
} }
pub fn getContainerFieldSignature(tree: Ast, field: Ast.full.ContainerField) []const u8 { pub fn getContainerFieldSignature(tree: Ast, field: Ast.full.ContainerField) []const u8 {
if(field.ast.value_expr == 0 and field.ast.type_expr == 0 and field.ast.align_expr == 0) {
return ""; // TODO display the container's type
}
const start = offsets.tokenLocation(tree, field.ast.name_token).start; const start = offsets.tokenLocation(tree, field.ast.name_token).start;
const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr; const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
const end = offsets.tokenLocation(tree, ast.lastToken(tree, end_node)).end; const end = offsets.tokenLocation(tree, ast.lastToken(tree, end_node)).end;

View File

@ -1037,6 +1037,18 @@ pub fn isBlock(tree: Ast, node: Ast.Node.Index) bool {
}; };
} }
/// Reports whether the function at `node` carries a body.
/// Returns `true` for a full `.fn_decl`, `false` for any of the
/// body-less prototype tags, and `null` when `node` is not a
/// function prototype/declaration at all.
pub fn fnProtoHasBody(tree: Ast, node: Ast.Node.Index) ?bool {
    const tag = tree.nodes.items(.tag)[node];
    // A declaration is the only form that owns a body.
    if (tag == .fn_decl) return true;
    return switch (tag) {
        .fn_proto,
        .fn_proto_multi,
        .fn_proto_one,
        .fn_proto_simple,
        => false,
        else => null,
    };
}
pub fn fnProto(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto { pub fn fnProto(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto {
return switch (tree.nodes.items(.tag)[node]) { return switch (tree.nodes.items(.tag)[node]) {
.fn_proto => tree.fnProto(node), .fn_proto => tree.fnProto(node),

@ -1 +1 @@
Subproject commit 9db1b99219c767d5e24994b1525273fe4031e464 Subproject commit 24845b0103e611c108d6bc334231c464e699742c

View File

@ -32,8 +32,7 @@ fn loop(server: *Server) !void {
try reader.readNoEof(buffer); try reader.readNoEof(buffer);
var writer = std.io.getStdOut().writer(); const writer = std.io.getStdOut().writer();
try server.processJsonRpc(writer, buffer); try server.processJsonRpc(writer, buffer);
} }
} }
@ -212,7 +211,7 @@ const stack_frames = switch (zig_builtin.mode) {
else => 0, else => 0,
}; };
pub fn main() anyerror!void { pub fn main() !void {
var gpa_state = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = stack_frames }){}; var gpa_state = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = stack_frames }){};
defer _ = gpa_state.deinit(); defer _ = gpa_state.deinit();
var tracy_state = if (tracy.enable_allocation) tracy.tracyAllocator(gpa_state.allocator()) else void{}; var tracy_state = if (tracy.enable_allocation) tracy.tracyAllocator(gpa_state.allocator()) else void{};

@ -1 +1 @@
Subproject commit 2d8723b69b39721eadcc296451012828899c0f17 Subproject commit f493d4aa8ba8141d9680473fad007d8a6348628e