Merge pull request #349 from leecannon/unused

Update to zig master
Alexandros Naskos 2021-06-24 14:28:19 +03:00 committed by GitHub
commit 96af681d21
8 changed files with 81 additions and 62 deletions

View File

@@ -319,6 +319,7 @@ fn resolveVarDeclAliasInternal(
     node_handle: NodeWithHandle,
     root: bool,
 ) error{OutOfMemory}!?DeclWithHandle {
+    _ = root;
     const handle = node_handle.handle;
     const tree = handle.tree;
     const node_tags = tree.nodes.items(.tag);
@@ -340,17 +341,6 @@ fn resolveVarDeclAliasInternal(
         const lhs = datas[node_handle.node].lhs;
         const container_node = if (isBuiltinCall(tree, lhs)) block: {
-            const data = datas[lhs];
-            const builtin = switch (node_tags[lhs]) {
-                .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
-                .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-                    &[_]ast.Node.Index{}
-                else if (data.rhs == 0)
-                    &[_]ast.Node.Index{data.lhs}
-                else
-                    &[_]ast.Node.Index{ data.lhs, data.rhs },
-                else => unreachable,
-            };
             if (!std.mem.eql(u8, tree.tokenSlice(main_tokens[lhs]), "@import"))
                 return null;
@@ -365,7 +355,6 @@ fn resolveVarDeclAliasInternal(
                 .other => |n| n,
                 else => return null,
             };
-            const resolved_tree_tags = resolved.handle.tree.nodes.items(.tag);
            if (!isContainer(resolved.handle.tree, resolved_node)) return null;
            break :block NodeWithHandle{ .node = resolved_node, .handle = resolved.handle };
        } else return null;
@@ -386,7 +375,6 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato
     const handle = decl_handle.handle;
     const tree = handle.tree;
     const token_tags = tree.tokens.items(.tag);
-    const main_tokes = tree.nodes.items(.main_token);
     const node_tags = tree.nodes.items(.tag);
     if (varDecl(handle.tree, decl)) |var_decl| {
@@ -782,7 +770,7 @@ pub fn resolveTypeOfNodeInternal(
         .call_one_comma,
         .async_call_one,
         .async_call_one_comma,
-        => |c| {
+        => {
            var params: [1]ast.Node.Index = undefined;
            const call = callFull(tree, node, &params) orelse unreachable;
@@ -898,8 +886,6 @@ pub fn resolveTypeOfNodeInternal(
            };
        },
        .field_access => {
-            const field_access = datas[node];
            if (datas[node].rhs == 0) return null;
            const rhs_str = tree.tokenSlice(datas[node].rhs);
            // If we are accessing a pointer type, remove one pointerness level :)
@@ -1227,7 +1213,6 @@ pub fn getFieldAccessType(
    });
    var bound_type_params = BoundTypeParams.init(&arena.allocator);
-    const tree = handle.tree;
    while (true) {
        const tok = tokenizer.next();
@@ -1537,6 +1522,8 @@ pub fn documentPositionContext(
    document: types.TextDocument,
    doc_position: DocumentPosition,
 ) !PositionContext {
+    _ = document;
    const line = doc_position.line;
    var tokenizer = std.zig.Tokenizer.init(line[0..doc_position.line_index]);
    var stack = try std.ArrayList(StackState).initCapacity(&arena.allocator, 8);
@@ -1545,7 +1532,7 @@ pub fn documentPositionContext(
        const tok = tokenizer.next();
        // Early exits.
        switch (tok.tag) {
-            .invalid, .invalid_ampersands => {
+            .invalid => {
                // Single '@' do not return a builtin token so we check this on our own.
                if (line[doc_position.line_index - 1] == '@') {
                    return PositionContext{
@@ -1794,7 +1781,7 @@ fn addOutlineNodes(allocator: *std.mem.Allocator, tree: ast.Tree, child: ast.Nod
            try addOutlineNodes(allocator, tree, member, context);
            return;
        },
-        else => |t| {},
+        else => {},
    }
    try getDocumentSymbolsInternal(allocator, tree, child, context);
 }
@@ -1941,7 +1928,6 @@ pub const DeclWithHandle = struct {
    pub fn nameToken(self: DeclWithHandle) ast.TokenIndex {
        const tree = self.handle.tree;
-        const token_tags = tree.tokens.items(.tag);
        return switch (self.decl.*) {
            .ast_node => |n| getDeclNameToken(tree, n).?,
            .param_decl => |p| p.name_token.?,
@@ -2252,7 +2238,7 @@ pub fn innermostBlockScopeIndex(handle: DocumentStore.Handle, source_index: usiz
    for (handle.document_scope.scopes[1..]) |*scope, idx| {
        if (source_index >= scope.range.start and source_index <= scope.range.end) {
            switch (scope.data) {
-                .container, .function, .block => |node| current = idx + 1,
+                .container, .function, .block => current = idx + 1,
                else => {},
            }
        }
@@ -2419,10 +2405,12 @@ pub fn lookupSymbolContainer(
 const CompletionContext = struct {
    pub fn hash(self: @This(), item: types.CompletionItem) u32 {
+        _ = self;
        return @truncate(u32, std.hash.Wyhash.hash(0, item.label));
    }
    pub fn eql(self: @This(), a: types.CompletionItem, b: types.CompletionItem) bool {
+        _ = self;
        return std.mem.eql(u8, a.label, b.label);
    }
 };
@@ -2458,7 +2446,7 @@ pub const DocumentScope = struct {
            var decl_it = scope.decls.iterator();
            var idx: usize = 0;
-            while (decl_it.next()) |name_decl| : (idx += 1) {
+            while (decl_it.next()) |_| : (idx += 1) {
                if (idx != 0) log.debug(", ", .{});
-                log.debug("{s}", .{name_decl.key});
            }
@@ -2639,6 +2627,7 @@ fn makeInnerScope(
            continue;
        }
        if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = decl })) |existing| {
+            _ = existing;
            // TODO Record a redefinition error.
        }
@@ -2652,7 +2641,7 @@ fn makeInnerScope(
            else => null,
        };
-        if (container_field) |field| {
+        if (container_field) |_| {
            if (!std.mem.eql(u8, name, "_")) {
                try context.enums.put(allocator, .{
                    .label = name,
@@ -2741,6 +2730,7 @@ fn makeScopeInternal(
                    tree.tokenSlice(name_token),
                    .{ .param_decl = param },
                )) |existing| {
+                    _ = existing;
                    // TODO record a redefinition error
                }
            }
@@ -2825,6 +2815,7 @@ fn makeScopeInternal(
            if (varDecl(tree, idx)) |var_decl| {
                const name = tree.tokenSlice(var_decl.ast.mut_token + 1);
                if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = idx })) |existing| {
+                    _ = existing;
                    // TODO record a redefinition error.
                }
            }
@@ -2971,6 +2962,7 @@ fn makeScopeInternal(
                tree.tokenSlice(index_token),
                .{ .array_index = index_token },
            )) |existing| {
+                _ = existing;
                // TODO Record a redefinition error
            }
        }
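Note: nearly every hunk in this file follows one pattern: the Zig compiler on master now errors on unused locals and parameters, so dead declarations are deleted and parameters that must stay in the signature are discarded with `_ = name;`. A minimal sketch of the idiom, with made-up names (not taken from the zls sources):

    const std = @import("std");

    // `root` has to stay in the signature for callers, but is unused here;
    // assigning it to `_` marks the non-use as intentional.
    fn resolveSomething(node: u32, root: bool) ?u32 {
        _ = root;
        if (node == 0) return null;
        return node + 1;
    }

    test "discarded parameter still compiles" {
        try std.testing.expectEqual(@as(?u32, 2), resolveSomething(1, true));
    }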

View File

@@ -4,7 +4,6 @@ const build_options = @import("build_options");
 const Config = @import("config.zig");
 const DocumentStore = @import("document_store.zig");
 const readRequestHeader = @import("header.zig").readRequestHeader;
-const data = @import("data/" ++ build_options.data_version ++ ".zig");
 const requests = @import("requests.zig");
 const types = @import("types.zig");
 const analysis = @import("analysis.zig");
@@ -15,6 +14,12 @@ const offsets = @import("offsets.zig");
 const setup = @import("setup.zig");
 const semantic_tokens = @import("semantic_tokens.zig");
 const known_folders = @import("known-folders");
+const data = blk: {
+    if (std.mem.eql(u8, build_options.data_version, "0.7.0")) break :blk @import("data/0.7.0.zig");
+    if (std.mem.eql(u8, build_options.data_version, "0.7.1")) break :blk @import("data/0.7.1.zig");
+    if (std.mem.eql(u8, build_options.data_version, "master")) break :blk @import("data/master.zig");
+    @compileError("invalid data_version provided");
+};
 const logger = std.log.scoped(.main);
@@ -45,7 +50,7 @@ pub fn log(
     var arena = std.heap.ArenaAllocator.init(allocator);
     defer arena.deinit();
-    var message = std.fmt.allocPrint(&arena.allocator, "[{s}-{s}] " ++ format, .{ @tagName(message_level), @tagName(scope) } ++ args) catch |err| {
+    var message = std.fmt.allocPrint(&arena.allocator, "[{s}-{s}] " ++ format, .{ @tagName(message_level), @tagName(scope) } ++ args) catch {
         std.debug.print("Failed to allocPrint message.\n", .{});
         return;
     };
@@ -182,12 +187,12 @@ fn respondGeneric(id: types.RequestId, response: []const u8) !void {
     try stdout.flush();
 }
-fn showMessage(@"type": types.MessageType, message: []const u8) !void {
+fn showMessage(message_type: types.MessageType, message: []const u8) !void {
     try send(types.Notification{
         .method = "window/showMessage",
         .params = .{
             .ShowMessageParams = .{
-                .@"type" = @"type",
+                .type = message_type,
                 .message = message,
             },
         },
@@ -707,16 +712,22 @@ fn getSymbolGlobal(arena: *std.heap.ArenaAllocator, pos_index: usize, handle: *D
 }
 fn gotoDefinitionLabel(arena: *std.heap.ArenaAllocator, id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
+    _ = config;
     const decl = (try getLabelGlobal(pos_index, handle)) orelse return try respondGeneric(id, null_result_response);
     return try gotoDefinitionSymbol(id, arena, decl, false);
 }
 fn gotoDefinitionGlobal(arena: *std.heap.ArenaAllocator, id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config, resolve_alias: bool) !void {
+    _ = config;
     const decl = (try getSymbolGlobal(arena, pos_index, handle)) orelse return try respondGeneric(id, null_result_response);
     return try gotoDefinitionSymbol(id, arena, decl, resolve_alias);
 }
 fn hoverDefinitionLabel(arena: *std.heap.ArenaAllocator, id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
+    _ = config;
     const decl = (try getLabelGlobal(pos_index, handle)) orelse return try respondGeneric(id, null_result_response);
     return try hoverSymbol(id, arena, decl);
 }
@@ -746,6 +757,8 @@ fn hoverDefinitionBuiltin(arena: *std.heap.ArenaAllocator, id: types.RequestId,
 }
 fn hoverDefinitionGlobal(arena: *std.heap.ArenaAllocator, id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
+    _ = config;
     const decl = (try getSymbolGlobal(arena, pos_index, handle)) orelse return try respondGeneric(id, null_result_response);
     return try hoverSymbol(id, arena, decl);
 }
@@ -757,6 +770,8 @@ fn getSymbolFieldAccess(
     range: analysis.SourceRange,
     config: Config,
 ) !?analysis.DeclWithHandle {
+    _ = config;
     const name = identifierFromPosition(position.absolute_index, handle.*);
     if (name.len == 0) return null;
     var tokenizer = std.zig.Tokenizer.init(position.line[range.start..range.end]);
@@ -804,6 +819,8 @@ fn hoverDefinitionFieldAccess(
 }
 fn gotoDefinitionString(arena: *std.heap.ArenaAllocator, id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
+    _ = config;
     const tree = handle.tree;
     const import_str = analysis.getImportStr(tree, 0, pos_index) orelse return try respondGeneric(id, null_result_response);
@@ -1234,6 +1251,8 @@ fn loadConfigInFolder(folder_path: []const u8) ?Config {
 }
 fn initializeHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: requests.Initialize, config: Config) !void {
+    _ = config;
     for (req.params.capabilities.offsetEncoding.value) |encoding| {
         if (std.mem.eql(u8, encoding, "utf-8")) {
             offset_encoding = .utf8;
@@ -1341,6 +1360,9 @@ fn initializeHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req:
 var keep_running = true;
 fn shutdownHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, config: Config) !void {
+    _ = config;
+    _ = arena;
     logger.notice("Server closing...", .{});
     keep_running = false;
@@ -1357,6 +1379,8 @@ fn openDocumentHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req
 }
 fn changeDocumentHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: requests.ChangeDocument, config: Config) !void {
+    _ = id;
     const handle = document_store.getHandle(req.params.textDocument.uri) orelse {
         logger.debug("Trying to change non existent document {s}", .{req.params.textDocument.uri});
         return;
@@ -1367,6 +1391,9 @@ fn changeDocumentHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, r
 }
 fn saveDocumentHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: requests.SaveDocument, config: Config) error{OutOfMemory}!void {
+    _ = config;
+    _ = id;
+    _ = arena;
     const handle = document_store.getHandle(req.params.textDocument.uri) orelse {
         logger.warn("Trying to save non existent document {s}", .{req.params.textDocument.uri});
         return;
@@ -1375,6 +1402,9 @@ fn saveDocumentHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req
 }
 fn closeDocumentHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: requests.CloseDocument, config: Config) error{}!void {
+    _ = config;
+    _ = id;
+    _ = arena;
     document_store.closeDocument(req.params.textDocument.uri);
 }
@@ -1412,7 +1442,6 @@ fn completionHandler(
     const doc_position = try offsets.documentPosition(handle.document, req.params.position, offset_encoding);
     const pos_context = try analysis.documentPositionContext(arena, handle.document, doc_position);
-    const use_snippets = config.enable_snippets and client_capabilities.supports_snippets;
     switch (pos_context) {
         .builtin => try completeBuiltin(arena, id, config),
@@ -1431,6 +1460,8 @@ fn signatureHelpHandler(
     req: requests.SignatureHelp,
     config: Config,
 ) !void {
+    _ = config;
     const getSignatureInfo = @import("signature_help.zig").getSignatureInfo;
     const handle = document_store.getHandle(req.params.textDocument.uri) orelse {
         logger.warn("Trying to get signature help in non existent document {s}", .{req.params.textDocument.uri});
@@ -1520,6 +1551,8 @@ fn hoverHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: reque
 }
 fn documentSymbolsHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: requests.DocumentSymbols, config: Config) !void {
+    _ = config;
     const handle = document_store.getHandle(req.params.textDocument.uri) orelse {
         logger.warn("Trying to get document symbols in non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(id, null_result_response);
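The `data` change at the top of this file swaps a concatenated `@import` path for a labeled block that selects the import at compile time and turns an unknown version into a build error. A reduced sketch of that dispatch pattern, using a placeholder constant instead of the real `build_options.data_version` and local structs instead of the data files:

    const std = @import("std");

    // Placeholder for build_options.data_version.
    const data_version = "master";

    const MasterData = struct { pub const builtins = [_][]const u8{ "@import", "@cImport" }; };
    const LegacyData = struct { pub const builtins = [_][]const u8{"@import"}; };

    // Container-level initializers are evaluated at compile time, so the
    // labeled block can branch on the string and fail the build otherwise.
    const data = blk: {
        if (std.mem.eql(u8, data_version, "master")) break :blk MasterData;
        if (std.mem.eql(u8, data_version, "0.7.1")) break :blk LegacyData;
        @compileError("invalid data_version provided");
    };

    test "selected data set" {
        try std.testing.expectEqual(@as(usize, 2), data.builtins.len);
    }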

View File

@@ -39,6 +39,8 @@ pub fn labelReferences(
     context: anytype,
     comptime handler: anytype,
 ) !void {
+    _ = arena;
     std.debug.assert(decl.decl.* == .label_decl);
     const handle = decl.handle;
     const tree = handle.tree;
@@ -529,7 +531,7 @@ pub fn symbolReferences(
     }
     switch (decl_handle.decl.*) {
-        .ast_node => |decl_node| {
+        .ast_node => {
            try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = curr_handle }, decl_handle, encoding, context, handler);
            var imports = std.ArrayList(*DocumentStore.Handle).init(&arena.allocator);
@@ -600,6 +602,7 @@ pub fn symbolReferences(
                log.warn("Could not find param decl's function", .{});
                return;
            };
+            _ = fn_node;
        },
        .pointer_payload, .switch_payload, .array_payload, .array_index => {
            try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = curr_handle }, decl_handle, encoding, context, handler);

View File

@@ -227,7 +227,7 @@ pub const SignatureHelp = struct {
     textDocument: TextDocumentIdentifier,
     position: types.Position,
     context: ?struct {
-        triggerKind: enum {
+        triggerKind: enum(u32) {
             invoked = 1,
             trigger_character = 2,
             content_change = 3,
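The `enum` → `enum(u32)` change here (and the matching ones in the types file below) pins an explicit integer tag type on enums whose members carry LSP wire values, presumably because zig master now requires a tag type once explicit values are assigned. A small illustration with an invented enum, using the builtins of that Zig version:

    const std = @import("std");

    const TriggerKind = enum(u32) {
        invoked = 1,
        trigger_character = 2,
        content_change = 3,
    };

    test "explicit tag type fixes the wire value" {
        try std.testing.expectEqual(@as(u32, 2), @enumToInt(TriggerKind.trigger_character));
        try std.testing.expectEqual(TriggerKind.content_change, @intToEnum(TriggerKind, 3));
    }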

View File

@@ -45,7 +45,7 @@ pub const TokenModifiers = packed struct {
         return res;
     }
-    fn set(self: *TokenModifiers, comptime field: []const u8) callconv(.Inline) void {
+    inline fn set(self: *TokenModifiers, comptime field: []const u8) void {
         @field(self, field) = true;
     }
 };
@@ -181,20 +181,20 @@ const Builder = struct {
     }
 };
-fn writeToken(
+inline fn writeToken(
     builder: *Builder,
     token_idx: ?ast.TokenIndex,
     tok_type: TokenType,
-) callconv(.Inline) !void {
+) !void {
     return try writeTokenMod(builder, token_idx, tok_type, .{});
 }
-fn writeTokenMod(
+inline fn writeTokenMod(
     builder: *Builder,
     token_idx: ?ast.TokenIndex,
     tok_type: TokenType,
     tok_mod: TokenModifiers,
-) callconv(.Inline) !void {
+) !void {
     if (token_idx) |ti| {
         try builder.add(ti, tok_type, tok_mod);
     }
@@ -224,7 +224,6 @@ fn fieldTokenType(container_decl: ast.Node.Index, handle: *DocumentStore.Handle)
 }
 fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: ast.TokenIndex, tok_mod: TokenModifiers) !void {
-    const tree = builder.handle.tree;
     if (type_node.type.is_type_val) {
         var new_tok_mod = tok_mod;
         if (type_node.isNamespace())
@@ -308,10 +307,9 @@ fn writeNodeTokens(
         .block_two,
         .block_two_semicolon,
         => {
-            const first_tok = if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) block: {
+            if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) {
                 try writeToken(builder, main_token - 2, .label);
-                break :block main_token + 1;
-            } else 0;
+            }
            const statements: []const ast.Node.Index = switch (tag) {
                .block, .block_semicolon => tree.extra_data[node_data[node].lhs..node_data[node].rhs],
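The `callconv(.Inline)` → `inline fn` edits track a syntax change on zig master: inlining is now written as a function qualifier rather than a calling-convention annotation. A before/after sketch with an illustrative helper:

    const std = @import("std");

    // Previously spelled: fn twice(x: u32) callconv(.Inline) u32 { ... }
    inline fn twice(x: u32) u32 {
        return x * 2;
    }

    test "inline fn is called like any other function" {
        try std.testing.expectEqual(@as(u32, 8), twice(4));
    }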

View File

@@ -72,14 +72,12 @@ pub fn wizard(allocator: *std.mem.Allocator) !void {
         print("Found zig executable '{s}' in PATH.\n", .{path});
     } else {
         write("Could not find 'zig' in PATH\n");
-        zig_exe_path = try zinput.askString(allocator,
-            if (std.builtin.os.tag == .windows)
+        zig_exe_path = try zinput.askString(allocator, if (std.builtin.os.tag == .windows)
             \\What is the path to the 'zig' executable you would like to use?
             \\Note that due to a bug in zig (https://github.com/ziglang/zig/issues/6044),
             \\your zig directory cannot contain the '/' character.
         else
-            "What is the path to the 'zig' executable you would like to use?",
-            std.fs.MAX_PATH_BYTES);
+            "What is the path to the 'zig' executable you would like to use?", std.fs.MAX_PATH_BYTES);
     }
@@ -99,7 +97,7 @@ pub fn wizard(allocator: *std.mem.Allocator) !void {
     std.debug.warn("Writing config to {s}/zls.json ... ", .{config_path});
-    const content = std.json.stringify(.{
+    try std.json.stringify(.{
         .zig_exe_path = zig_exe_path,
         .enable_snippets = snippets,
         .warn_style = style,
@@ -111,7 +109,6 @@ pub fn wizard(allocator: *std.mem.Allocator) !void {
     write("successful.\n\n\n\n");
     // Keep synced with README.md
     switch (editor) {
         .VSCode => {
@@ -231,7 +228,7 @@ pub fn findZig(allocator: *std.mem.Allocator) !?[]const u8 {
     var it = std.mem.tokenize(env_path, &[_]u8{std.fs.path.delimiter});
     while (it.next()) |path| {
         if (std.builtin.os.tag == .windows) {
-            if (std.mem.indexOfScalar(u8, path, '/')) |s| continue;
+            if (std.mem.indexOfScalar(u8, path, '/') != null) continue;
         }
         const full_path = try std.fs.path.join(allocator, &[_][]const u8{
             path,
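The `findZig` change replaces a capture that was never used (`|s|`) with a plain `!= null` test on the optional, which avoids the unused-capture error without changing behaviour. A reduced sketch:

    const std = @import("std");

    fn containsSlash(path: []const u8) bool {
        // Comparing the optional against null avoids binding a capture
        // that would otherwise be reported as unused.
        return std.mem.indexOfScalar(u8, path, '/') != null;
    }

    test "slash detection" {
        try std.testing.expect(containsSlash("C:/zig"));
        try std.testing.expect(!containsSlash("C:\\zig"));
    }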

View File

@@ -239,7 +239,7 @@ pub const CompletionItem = struct {
 };
 pub const DocumentSymbol = struct {
-    const Kind = enum {
+    const Kind = enum(u32) {
         File = 1,
         Module = 2,
         Namespace = 3,
@@ -317,7 +317,7 @@ const InitializeResult = struct {
         triggerCharacters: []const []const u8,
         retriggerCharacters: []const []const u8,
     },
-    textDocumentSync: enum {
+    textDocumentSync: enum(u32) {
         None = 0,
         Full = 1,
         Incremental = 2,

View File

@@ -6,9 +6,8 @@ const reserved_chars = &[_]u8{
     '(', ')', '*', '+', ',', ':',
     ';', '=', '?', '@', '[', ']',
 };
-const reserved_escapes = comptime blk: {
-    var escapes: [reserved_chars.len][3]u8
-        = [_][3]u8{[_]u8{undefined} ** 3} ** reserved_chars.len;
+const reserved_escapes = blk: {
+    var escapes: [reserved_chars.len][3]u8 = [_][3]u8{[_]u8{undefined} ** 3} ** reserved_chars.len;
     for (reserved_chars) |c, i| {
         escapes[i][0] = '%';
@@ -25,8 +24,6 @@ pub fn fromPath(allocator: *mem.Allocator, path: []const u8) ![]const u8 {
     var buf = std.ArrayList(u8).init(allocator);
     try buf.appendSlice(prefix);
-    const out_stream = buf.writer();
     for (path) |char| {
         if (char == std.fs.path.sep) {
             try buf.append('/');
@@ -139,4 +136,3 @@ pub fn parse(allocator: *mem.Allocator, str: []const u8) ![]u8 {
     return allocator.shrink(uri, i);
 }
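Dropping the `comptime` keyword from the `reserved_escapes` initializer works because the initializer of a container-level constant is already evaluated at compile time, so the keyword adds nothing there. A minimal sketch of the same construction, with an invented table:

    const std = @import("std");

    const digits = "0123456789abcdef";

    // The labeled block runs at compile time without an explicit `comptime`
    // keyword because it initializes a container-level constant.
    const hex_pairs = blk: {
        var pairs: [digits.len][2]u8 = undefined;
        for (digits) |c, i| {
            pairs[i] = [2]u8{ '0', c };
        }
        break :blk pairs;
    };

    test "comptime-built table" {
        try std.testing.expectEqual(@as(u8, 'f'), hex_pairs[15][1]);
    }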