simplify analysis by introducing an Analysis struct

Techatrix 2023-03-12 23:48:31 +01:00 committed by Lee Cannon
parent 973d33d435
commit 0ff0a193cc
9 changed files with 375 additions and 432 deletions
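The analysis.zig diff itself is suppressed further down for size, so the shape of the new struct has to be read off its call sites in the other eight files. As orientation, here is a minimal sketch of the surface this commit appears to introduce; field names other than store are assumptions inferred from the call sites, not code taken from the commit:

    // analysis.zig becomes a file-backed struct ("Analyser") instead of a
    // bag of free functions. Sketch only; the real file may differ.
    const std = @import("std");
    const DocumentStore = @import("DocumentStore.zig");

    const Analyser = @This();

    gpa: std.mem.Allocator,
    arena: std.mem.Allocator,
    store: *DocumentStore,

    pub fn init(gpa: std.mem.Allocator, arena: std.mem.Allocator, store: *DocumentStore) Analyser {
        return .{ .gpa = gpa, .arena = arena, .store = store };
    }

    pub fn deinit(self: *Analyser) void {
        self.* = undefined; // plus freeing any cached resolution state
    }

    /// Called on every document change (see changeDocumentHandler below).
    pub fn invalidate(self: *Analyser) void {
        _ = self; // drop cached, possibly-stale analysis state
    }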

View File

@@ -7,7 +7,7 @@ const Config = @import("Config.zig");
 const configuration = @import("configuration.zig");
 const DocumentStore = @import("DocumentStore.zig");
 const types = @import("lsp.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const ast = @import("ast.zig");
 const references = @import("references.zig");
 const offsets = @import("offsets.zig");
@@ -37,6 +37,7 @@ const log = std.log.scoped(.zls_server);
 config: *Config,
 allocator: std.mem.Allocator,
 arena: std.heap.ArenaAllocator,
+analyser: Analyser,
 document_store: DocumentStore,
 builtin_completions: ?std.ArrayListUnmanaged(types.CompletionItem),
 client_capabilities: ClientCapabilities = .{},
@@ -283,10 +284,10 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
 if (func.extern_export_inline_token != null) break :blk;
 if (func.name_token) |name_token| {
-const is_type_function = analysis.isTypeFunction(tree, func);
+const is_type_function = Analyser.isTypeFunction(tree, func);
 const func_name = tree.tokenSlice(name_token);
-if (!is_type_function and !analysis.isCamelCase(func_name)) {
+if (!is_type_function and !Analyser.isCamelCase(func_name)) {
 try diagnostics.append(allocator, .{
 .range = offsets.tokenToRange(tree, name_token, server.offset_encoding),
 .severity = .Hint,
@@ -294,7 +295,7 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
 .source = "zls",
 .message = "Functions should be camelCase",
 });
-} else if (is_type_function and !analysis.isPascalCase(func_name)) {
+} else if (is_type_function and !Analyser.isPascalCase(func_name)) {
 try diagnostics.append(allocator, .{
 .range = offsets.tokenToRange(tree, name_token, server.offset_encoding),
 .severity = .Hint,
@@ -512,7 +513,7 @@ pub fn autofix(server: *Server, allocator: std.mem.Allocator, handle: *const Doc
 var builder = code_actions.Builder{
 .arena = server.arena.allocator(),
-.document_store = &server.document_store,
+.analyser = &server.analyser,
 .handle = handle,
 .offset_encoding = server.offset_encoding,
 };
@@ -544,7 +545,7 @@ pub fn autofix(server: *Server, allocator: std.mem.Allocator, handle: *const Doc
 pub fn typeToCompletion(
 server: *Server,
 list: *std.ArrayListUnmanaged(types.CompletionItem),
-field_access: analysis.FieldAccessReturn,
+field_access: Analyser.FieldAccessReturn,
 orig_handle: *const DocumentStore.Handle,
 either_descriptor: ?[]const u8,
 ) error{OutOfMemory}!void {
@@ -620,8 +621,8 @@ pub fn typeToCompletion(
 pub fn nodeToCompletion(
 server: *Server,
 list: *std.ArrayListUnmanaged(types.CompletionItem),
-node_handle: analysis.NodeWithHandle,
-unwrapped: ?analysis.TypeWithHandle,
+node_handle: Analyser.NodeWithHandle,
+unwrapped: ?Analyser.TypeWithHandle,
 orig_handle: *const DocumentStore.Handle,
 is_type_val: bool,
 parent_is_type_val: ?bool,
@@ -645,7 +646,7 @@ pub fn nodeToCompletion(
 const Documentation = @TypeOf(@as(types.CompletionItem, undefined).documentation);
-const doc: Documentation = if (try analysis.getDocComments(
+const doc: Documentation = if (try Analyser.getDocComments(
 allocator,
 handle.tree,
 node,
@@ -672,9 +673,7 @@ pub fn nodeToCompletion(
 .parent_is_type_val = is_type_val,
 .either_descriptor = either_descriptor,
 };
-try analysis.iterateSymbolsContainer(
-allocator,
-&server.document_store,
+try server.analyser.iterateSymbolsContainer(
 node_handle,
 orig_handle,
 declToCompletion,
@@ -698,17 +697,17 @@ pub fn nodeToCompletion(
 const use_snippets = server.config.enable_snippets and server.client_capabilities.supports_snippets;
 const insert_text = if (use_snippets) blk: {
 const skip_self_param = !(parent_is_type_val orelse true) and
-try analysis.hasSelfParam(allocator, &server.document_store, handle, func);
-break :blk try analysis.getFunctionSnippet(server.arena.allocator(), tree, func, skip_self_param);
+try server.analyser.hasSelfParam(handle, func);
+break :blk try Analyser.getFunctionSnippet(server.arena.allocator(), tree, func, skip_self_param);
 } else tree.tokenSlice(func.name_token.?);
-const is_type_function = analysis.isTypeFunction(handle.tree, func);
+const is_type_function = Analyser.isTypeFunction(handle.tree, func);
 try list.append(allocator, .{
 .label = handle.tree.tokenSlice(name_token),
 .kind = if (is_type_function) .Struct else .Function,
 .documentation = doc,
-.detail = analysis.getFunctionSignature(handle.tree, func),
+.detail = Analyser.getFunctionSignature(handle.tree, func),
 .insertText = insert_text,
 .insertTextFormat = if (use_snippets) .Snippet else .PlainText,
 });
@@ -722,7 +721,7 @@ pub fn nodeToCompletion(
 const var_decl = tree.fullVarDecl(node).?;
 const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const;
-if (try analysis.resolveVarDeclAlias(allocator, &server.document_store, node_handle)) |result| {
+if (try server.analyser.resolveVarDeclAlias(node_handle)) |result| {
 const context = DeclToCompletionContext{
 .server = server,
 .completions = list,
@@ -736,7 +735,7 @@ pub fn nodeToCompletion(
 .label = handle.tree.tokenSlice(var_decl.ast.mut_token + 1),
 .kind = if (is_const) .Constant else .Variable,
 .documentation = doc,
-.detail = analysis.getVariableSignature(tree, var_decl),
+.detail = Analyser.getVariableSignature(tree, var_decl),
 .insertText = tree.tokenSlice(var_decl.ast.mut_token + 1),
 .insertTextFormat = .PlainText,
 });
@@ -750,7 +749,7 @@ pub fn nodeToCompletion(
 .label = handle.tree.tokenSlice(field.ast.main_token),
 .kind = if (field.ast.tuple_like) .Enum else .Field,
 .documentation = doc,
-.detail = analysis.getContainerFieldSignature(handle.tree, field),
+.detail = Analyser.getContainerFieldSignature(handle.tree, field),
 .insertText = tree.tokenSlice(field.ast.main_token),
 .insertTextFormat = .PlainText,
 });
@@ -825,7 +824,7 @@ pub fn nodeToCompletion(
 .insertTextFormat = .PlainText,
 });
 },
-else => if (analysis.nodeToString(tree, node)) |string| {
+else => if (Analyser.nodeToString(tree, node)) |string| {
 try list.append(allocator, .{
 .label = string,
 .kind = .Field,
@@ -842,12 +841,12 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []
 if (pos_index + 1 >= handle.text.len) return "";
 var start_idx = pos_index;
-while (start_idx > 0 and analysis.isSymbolChar(handle.text[start_idx - 1])) {
+while (start_idx > 0 and Analyser.isSymbolChar(handle.text[start_idx - 1])) {
 start_idx -= 1;
 }
 var end_idx = pos_index;
-while (end_idx < handle.text.len and analysis.isSymbolChar(handle.text[end_idx])) {
+while (end_idx < handle.text.len and Analyser.isSymbolChar(handle.text[end_idx])) {
 end_idx += 1;
 }
@@ -857,7 +856,7 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []
 pub fn gotoDefinitionSymbol(
 server: *Server,
-decl_handle: analysis.DeclWithHandle,
+decl_handle: Analyser.DeclWithHandle,
 resolve_alias: bool,
 ) error{OutOfMemory}!?types.Location {
 const tracy_zone = tracy.trace(@src());
@@ -868,14 +867,14 @@ pub fn gotoDefinitionSymbol(
 const name_token = switch (decl_handle.decl.*) {
 .ast_node => |node| block: {
 if (resolve_alias) {
-if (try analysis.resolveVarDeclAlias(server.arena.allocator(), &server.document_store, .{ .node = node, .handle = handle })) |result| {
+if (try server.analyser.resolveVarDeclAlias(.{ .node = node, .handle = handle })) |result| {
 handle = result.handle;
 break :block result.nameToken();
 }
 }
-break :block analysis.getDeclNameToken(handle.tree, node) orelse return null;
+break :block Analyser.getDeclNameToken(handle.tree, node) orelse return null;
 },
 else => decl_handle.nameToken(),
 };
@@ -886,7 +885,7 @@ pub fn gotoDefinitionSymbol(
 };
 }
-pub fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle, markup_kind: types.MarkupKind) error{OutOfMemory}!?[]const u8 {
+pub fn hoverSymbol(server: *Server, decl_handle: Analyser.DeclWithHandle, markup_kind: types.MarkupKind) error{OutOfMemory}!?[]const u8 {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();
@@ -897,27 +896,27 @@ pub fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle, markup
 const def_str = switch (decl_handle.decl.*) {
 .ast_node => |node| def: {
-if (try analysis.resolveVarDeclAlias(server.arena.allocator(), &server.document_store, .{ .node = node, .handle = handle })) |result| {
+if (try server.analyser.resolveVarDeclAlias(.{ .node = node, .handle = handle })) |result| {
 return try server.hoverSymbol(result, markup_kind);
 }
-doc_str = try analysis.getDocComments(server.arena.allocator(), tree, node, markup_kind);
+doc_str = try Analyser.getDocComments(server.arena.allocator(), tree, node, markup_kind);
 var buf: [1]Ast.Node.Index = undefined;
 if (tree.fullVarDecl(node)) |var_decl| {
-break :def analysis.getVariableSignature(tree, var_decl);
+break :def Analyser.getVariableSignature(tree, var_decl);
 } else if (tree.fullFnProto(&buf, node)) |fn_proto| {
-break :def analysis.getFunctionSignature(tree, fn_proto);
+break :def Analyser.getFunctionSignature(tree, fn_proto);
 } else if (tree.fullContainerField(node)) |field| {
-break :def analysis.getContainerFieldSignature(tree, field);
+break :def Analyser.getContainerFieldSignature(tree, field);
 } else {
-break :def analysis.nodeToString(tree, node) orelse return null;
+break :def Analyser.nodeToString(tree, node) orelse return null;
 }
 },
 .param_payload => |pay| def: {
 const param = pay.param;
 if (param.first_doc_comment) |doc_comments| {
-doc_str = try analysis.collectDocComments(server.arena.allocator(), handle.tree, doc_comments, markup_kind, false);
+doc_str = try Analyser.collectDocComments(server.arena.allocator(), handle.tree, doc_comments, markup_kind, false);
 }
 const first_token = ast.paramFirstToken(tree, param);
@@ -936,9 +935,7 @@ pub fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle, markup
 => tree.tokenSlice(decl_handle.nameToken()),
 };
-var bound_type_params = analysis.BoundTypeParams{};
-defer bound_type_params.deinit(server.document_store.allocator);
-const resolved_type = try decl_handle.resolveType(server.arena.allocator(), &server.document_store, &bound_type_params);
+const resolved_type = try decl_handle.resolveType(&server.analyser);
 const resolved_type_str = if (resolved_type) |rt|
 if (rt.type.is_type_val) switch (rt.type.data) {
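The hunk above is the recurring simplification of this commit in miniature: previously every caller of resolveType had to allocate a BoundTypeParams map, remember to deinit it, and pass the allocator and document store alongside; now all of that state rides inside the Analyser and the call shrinks to a single argument. Condensed from the hunk above:

    // before: per-call-site state management
    var bound_type_params = analysis.BoundTypeParams{};
    defer bound_type_params.deinit(server.document_store.allocator);
    const resolved_type = try decl_handle.resolveType(server.arena.allocator(), &server.document_store, &bound_type_params);

    // after: the analyser owns allocator, store, and bound type params
    const resolved_type = try decl_handle.resolveType(&server.analyser);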
@@ -1002,28 +999,28 @@ pub fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle, markup
 return hover_text;
 }
-pub fn getLabelGlobal(pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?analysis.DeclWithHandle {
+pub fn getLabelGlobal(pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?Analyser.DeclWithHandle {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();
 const name = identifierFromPosition(pos_index, handle.*);
 if (name.len == 0) return null;
-return try analysis.lookupLabel(handle, name, pos_index);
+return try Analyser.lookupLabel(handle, name, pos_index);
 }
 pub fn getSymbolGlobal(
 server: *Server,
 pos_index: usize,
 handle: *const DocumentStore.Handle,
-) error{OutOfMemory}!?analysis.DeclWithHandle {
+) error{OutOfMemory}!?Analyser.DeclWithHandle {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();
 const name = identifierFromPosition(pos_index, handle.*);
 if (name.len == 0) return null;
-return try analysis.lookupSymbolGlobal(server.arena.allocator(), &server.document_store, handle, name, pos_index);
+return try server.analyser.lookupSymbolGlobal(handle, name, pos_index);
 }
 pub fn gotoDefinitionLabel(
@@ -1173,7 +1170,7 @@ pub fn getSymbolFieldAccesses(
 handle: *const DocumentStore.Handle,
 source_index: usize,
 loc: offsets.Loc,
-) error{OutOfMemory}!?[]const analysis.DeclWithHandle {
+) error{OutOfMemory}!?[]const Analyser.DeclWithHandle {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();
@@ -1183,9 +1180,9 @@ pub fn getSymbolFieldAccesses(
 var held_range = try server.arena.allocator().dupeZ(u8, offsets.locToSlice(handle.text, loc));
 var tokenizer = std.zig.Tokenizer.init(held_range);
-var decls_with_handles = std.ArrayListUnmanaged(analysis.DeclWithHandle){};
+var decls_with_handles = std.ArrayListUnmanaged(Analyser.DeclWithHandle){};
-if (try analysis.getFieldAccessType(server.arena.allocator(), &server.document_store, handle, source_index, &tokenizer)) |result| {
+if (try server.analyser.getFieldAccessType(handle, source_index, &tokenizer)) |result| {
 const container_handle = result.unwrapped orelse result.original;
 const container_handle_nodes = try container_handle.getAllTypesWithHandles(server.arena.allocator());
@@ -1195,9 +1192,7 @@ pub fn getSymbolFieldAccesses(
 .other => |n| n,
 else => continue,
 };
-try decls_with_handles.append(server.arena.allocator(), (try analysis.lookupSymbolContainer(
-server.arena.allocator(),
-&server.document_store,
+try decls_with_handles.append(server.arena.allocator(), (try server.analyser.lookupSymbolContainer(
 .{ .node = container_handle_node, .handle = ty.handle },
 name,
 true,
@@ -1263,7 +1258,7 @@ pub fn hoverDefinitionFieldAccess(
 pub fn gotoDefinitionString(
 server: *Server,
-pos_context: analysis.PositionContext,
+pos_context: Analyser.PositionContext,
 handle: *const DocumentStore.Handle,
 ) error{OutOfMemory}!?types.Location {
 const tracy_zone = tracy.trace(@src());
@@ -1320,7 +1315,7 @@ const DeclToCompletionContext = struct {
 either_descriptor: ?[]const u8 = null,
 };
-pub fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.DeclWithHandle) error{OutOfMemory}!void {
+pub fn declToCompletion(context: DeclToCompletionContext, decl_handle: Analyser.DeclWithHandle) error{OutOfMemory}!void {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();
@@ -1345,9 +1340,9 @@ pub fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.
 const doc: Documentation = if (param.first_doc_comment) |doc_comments| .{ .MarkupContent = types.MarkupContent{
 .kind = doc_kind,
 .value = if (context.either_descriptor) |ed|
-try std.fmt.allocPrint(allocator, "`Conditionally available: {s}`\n\n{s}", .{ ed, try analysis.collectDocComments(allocator, tree, doc_comments, doc_kind, false) })
+try std.fmt.allocPrint(allocator, "`Conditionally available: {s}`\n\n{s}", .{ ed, try Analyser.collectDocComments(allocator, tree, doc_comments, doc_kind, false) })
 else
-try analysis.collectDocComments(allocator, tree, doc_comments, doc_kind, false),
+try Analyser.collectDocComments(allocator, tree, doc_comments, doc_kind, false),
 } } else null;
 const first_token = ast.paramFirstToken(tree, param);
@@ -1406,7 +1401,7 @@ pub fn completeLabel(
 .completions = &completions,
 .orig_handle = handle,
 };
-try analysis.iterateLabels(handle, pos_index, declToCompletion, context);
+try Analyser.iterateLabels(handle, pos_index, declToCompletion, context);
 return completions.toOwnedSlice(server.arena.allocator());
 }
@@ -1490,7 +1485,7 @@ pub fn completeGlobal(server: *Server, pos_index: usize, handle: *const Document
 .completions = &completions,
 .orig_handle = handle,
 };
-try analysis.iterateSymbolsGlobal(server.arena.allocator(), &server.document_store, handle, pos_index, declToCompletion, context);
+try server.analyser.iterateSymbolsGlobal(handle, pos_index, declToCompletion, context);
 try populateSnippedCompletions(server.arena.allocator(), &completions, &snipped_data.generic, server.config.*, null);
 if (server.client_capabilities.label_details_support) {
@@ -1513,7 +1508,7 @@ pub fn completeFieldAccess(server: *Server, handle: *const DocumentStore.Handle,
 var held_loc = try allocator.dupeZ(u8, offsets.locToSlice(handle.text, loc));
 var tokenizer = std.zig.Tokenizer.init(held_loc);
-const result = (try analysis.getFieldAccessType(allocator, &server.document_store, handle, source_index, &tokenizer)) orelse return null;
+const result = (try server.analyser.getFieldAccessType(handle, source_index, &tokenizer)) orelse return null;
 try server.typeToCompletion(&completions, result, handle, null);
 if (server.client_capabilities.label_details_support) {
 for (completions.items) |*item| {
@@ -1739,9 +1734,9 @@ pub fn completeFileSystemStringLiteral(
 arena: std.mem.Allocator,
 store: DocumentStore,
 handle: DocumentStore.Handle,
-pos_context: analysis.PositionContext,
+pos_context: Analyser.PositionContext,
 ) ![]types.CompletionItem {
-var completions: analysis.CompletionSet = .{};
+var completions: Analyser.CompletionSet = .{};
 const loc = pos_context.loc().?;
 var completing = handle.tree.source[loc.start + 1 .. loc.end - 1];
@@ -2217,6 +2212,9 @@ fn changeDocumentHandler(server: *Server, notification: types.DidChangeTextDocum
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();
+// whenever a document changes, any cached info is invalidated
+server.analyser.invalidate();
 const handle = server.document_store.getHandle(notification.textDocument.uri) orelse return;
 const new_text = try diff.applyContentChanges(server.allocator, handle.text, notification.contentChanges, server.offset_encoding);
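The body of invalidate is not visible in this view, but the TODO removed from create (further down) says the struct was meant to absorb the old global using_trail & resolve_trail state, and the call sites above stopped keeping their own BoundTypeParams. A purely illustrative body, under those assumptions:

    // Hypothetical: field names are guesses based on the removed TODO and
    // the caller-side state this commit deletes; not from analysis.zig.
    pub fn invalidate(self: *Analyser) void {
        self.using_trail.clearRetainingCapacity();
        self.resolve_trail.clearRetainingCapacity();
        self.bound_type_params.clearRetainingCapacity();
    }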
@@ -2288,7 +2286,7 @@ fn semanticTokensFullHandler(server: *Server, request: types.SemanticTokensParam
 const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
-return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.document_store, handle, null, server.offset_encoding);
+return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.analyser, handle, null, server.offset_encoding);
 }
 fn semanticTokensRangeHandler(server: *Server, request: types.SemanticTokensRangeParams) Error!?types.SemanticTokens {
@@ -2300,7 +2298,7 @@ fn semanticTokensRangeHandler(server: *Server, request: types.SemanticTokensRang
 const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
 const loc = offsets.rangeToLoc(handle.tree.source, request.range, server.offset_encoding);
-return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.document_store, handle, loc, server.offset_encoding);
+return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.analyser, handle, loc, server.offset_encoding);
 }
 pub fn completionHandler(server: *Server, request: types.CompletionParams) Error!?types.CompletionList {
@@ -2317,7 +2315,7 @@ pub fn completionHandler(server: *Server, request: types.CompletionParams) Error
 }
 const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
-const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, false);
+const pos_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, false);
 const maybe_completions = switch (pos_context) {
 .builtin => try server.completeBuiltin(),
@@ -2345,7 +2343,7 @@ pub fn completionHandler(server: *Server, request: types.CompletionParams) Error
 // The cursor is in the middle of a word or before a @, so we can replace
 // the remaining identifier with the completion instead of just inserting.
 // TODO Identify function call/struct init and replace the whole thing.
-const lookahead_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+const lookahead_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
 if (server.client_capabilities.supports_apply_edits and
 pos_context != .import_string_literal and
 pos_context != .cinclude_string_literal and
@@ -2403,7 +2401,7 @@ pub fn signatureHelpHandler(server: *Server, request: types.SignatureHelpParams)
 const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
 const signature_info = (try getSignatureInfo(
-&server.document_store,
+&server.analyser,
 server.arena.allocator(),
 handle,
 source_index,
@@ -2429,7 +2427,7 @@ pub fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, r
 if (request.position.character == 0) return null;
 const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
-const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+const pos_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
 return switch (pos_context) {
 .builtin => |loc| .{ .Location = (try server.gotoDefinitionBuiltin(handle, loc)) orelse return null },
@@ -2473,7 +2471,7 @@ pub fn hoverHandler(server: *Server, request: types.HoverParams) Error!?types.Ho
 if (request.position.character == 0) return null;
 const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
-const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+const pos_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
 const response = switch (pos_context) {
 .builtin => try server.hoverDefinitionBuiltin(source_index, handle),
@@ -2636,7 +2634,7 @@ pub fn generalReferencesHandler(server: *Server, request: GeneralReferencesReque
 if (request.position().character <= 0) return null;
 const source_index = offsets.positionToIndex(handle.text, request.position(), server.offset_encoding);
-const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+const pos_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
 // TODO: Make this work with branching types
 const decl = switch (pos_context) {
@@ -2663,7 +2661,7 @@ pub fn generalReferencesHandler(server: *Server, request: GeneralReferencesReque
 else
 try references.symbolReferences(
 allocator,
-&server.document_store,
+&server.analyser,
 decl,
 server.offset_encoding,
 include_decl,
@@ -2736,7 +2734,7 @@ fn inlayHintHandler(server: *Server, request: types.InlayHintParams) Error!?[]ty
 const hints = try inlay_hints.writeRangeInlayHint(
 server.arena.allocator(),
 server.config.*,
-&server.document_store,
+&server.analyser,
 handle,
 loc,
 hover_kind,
@@ -2786,7 +2784,7 @@ fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]
 var builder = code_actions.Builder{
 .arena = server.arena.allocator(),
-.document_store = &server.document_store,
+.analyser = &server.analyser,
 .handle = handle,
 .offset_encoding = server.offset_encoding,
 };
@@ -3203,16 +3201,12 @@ pub fn create(
 recording_enabled: bool,
 replay_enabled: bool,
 ) !*Server {
-// TODO replace global with something like an Analyser struct
-// which contains using_trail & resolve_trail and place it inside Server
-// see: https://github.com/zigtools/zls/issues/536
-analysis.init(allocator);
 const server = try allocator.create(Server);
 server.* = Server{
 .config = config,
 .runtime_zig_version = null,
 .allocator = allocator,
+.analyser = undefined,
 .arena = std.heap.ArenaAllocator.init(allocator),
 .document_store = .{
 .allocator = allocator,
@@ -3224,6 +3218,7 @@ pub fn create(
 .replay_enabled = replay_enabled,
 .status = .uninitialized,
 };
+server.analyser = Analyser.init(allocator, server.arena.allocator(), &server.document_store);
 try configuration.configChanged(config, &server.runtime_zig_version, allocator, config_path);
@@ -3232,7 +3227,7 @@ pub fn create(
 pub fn destroy(server: *Server) void {
 server.document_store.deinit();
-analysis.deinit();
+server.analyser.deinit();
 if (server.builtin_completions) |*completions| completions.deinit(server.allocator);
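One detail worth noting in create: Analyser.init wants the server's arena allocator and a stable pointer to its document store, neither of which exists at its final address until the heap-allocated Server has been populated. Hence the two-phase initialization visible above:

    const server = try allocator.create(Server);
    server.* = Server{
        // ... other fields ...
        .analyser = undefined, // cannot be built yet
        // ... other fields ...
    };
    // &server.document_store now has its final address:
    server.analyser = Analyser.init(allocator, server.arena.allocator(), &server.document_store);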

File diff suppressed because it is too large
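That suppressed file (presumably analysis.zig itself) carries most of the commit's 375-insertion/432-deletion delta. Its shape is still legible from the call sites: anything that used to thread an allocator and a *DocumentStore becomes a method on the Analyser, while pure tree/string helpers stay free functions reached through the new namespace. Schematically, with argument lists taken from the hunks above:

    // stateful helper: free function becomes a method
    // was: analysis.lookupSymbolGlobal(arena, &document_store, handle, name, pos_index)
    const decl = try server.analyser.lookupSymbolGlobal(handle, name, pos_index);

    // stateless helper: only the namespace changes
    // was: analysis.getFunctionSignature(tree, func)
    const detail = Analyser.getFunctionSignature(tree, func);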

View File

@@ -2,7 +2,7 @@ const std = @import("std");
 const Ast = std.zig.Ast;
 const DocumentStore = @import("DocumentStore.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const ast = @import("ast.zig");
 const types = @import("lsp.zig");
@@ -10,7 +10,7 @@ const offsets = @import("offsets.zig");
 pub const Builder = struct {
 arena: std.mem.Allocator,
-document_store: *DocumentStore,
+analyser: *Analyser,
 handle: *const DocumentStore.Handle,
 offset_encoding: offsets.Encoding,
@@ -88,9 +88,7 @@ fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnman
 const token_starts = tree.tokens.items(.start);
-const decl = (try analysis.lookupSymbolGlobal(
-builder.arena,
-builder.document_store,
+const decl = (try builder.analyser.lookupSymbolGlobal(
 builder.handle,
 identifier_name,
 loc.start,
@@ -134,9 +132,7 @@ fn handleUnusedVariableOrConstant(builder: *Builder, actions: *std.ArrayListUnma
 const token_tags = tree.tokens.items(.tag);
 const token_starts = tree.tokens.items(.start);
-const decl = (try analysis.lookupSymbolGlobal(
-builder.arena,
-builder.document_store,
+const decl = (try builder.analyser.lookupSymbolGlobal(
 builder.handle,
 identifier_name,
 loc.start,

View File

@@ -1,7 +1,7 @@
 const std = @import("std");
 const zig_builtin = @import("builtin");
 const DocumentStore = @import("DocumentStore.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const types = @import("lsp.zig");
 const offsets = @import("offsets.zig");
 const tracy = @import("tracy.zig");
@@ -26,7 +26,7 @@ pub const InlayHint = struct {
 const Builder = struct {
 arena: std.mem.Allocator,
-store: *DocumentStore,
+analyser: *Analyser,
 config: *const Config,
 handle: *const DocumentStore.Handle,
 hints: std.ArrayListUnmanaged(InlayHint),
@@ -64,7 +64,7 @@ const Builder = struct {
 /// `call` is the function call
 /// `decl_handle` should be a function protototype
 /// writes parameter hints into `builder.hints`
-fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: analysis.DeclWithHandle) !void {
+fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: Analyser.DeclWithHandle) !void {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();
@@ -85,7 +85,7 @@ fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: analysis.D
 var i: usize = 0;
 var it = fn_proto.iterate(&decl_tree);
-if (try analysis.hasSelfParam(builder.arena, builder.store, decl_handle.handle, fn_proto)) {
+if (try builder.analyser.hasSelfParam(decl_handle.handle, fn_proto)) {
 _ = ast.nextFnParam(&it);
 }
@@ -187,7 +187,7 @@ fn writeCallNodeHint(builder: *Builder, call: Ast.full.Call) !void {
 const source_index = offsets.tokenToIndex(tree, main_tokens[call.ast.fn_expr]);
 const name = offsets.tokenToSlice(tree, main_tokens[call.ast.fn_expr]);
-if (try analysis.lookupSymbolGlobal(builder.arena, builder.store, handle, name, source_index)) |decl_handle| {
+if (try builder.analyser.lookupSymbolGlobal(handle, name, source_index)) |decl_handle| {
 try writeCallHint(builder, call, decl_handle);
 }
 },
@@ -204,13 +204,11 @@ fn writeCallNodeHint(builder: *Builder, call: Ast.full.Call) !void {
 // note: we have the ast node, traversing it would probably yield better results
 // than trying to re-tokenize and re-parse it
-if (try analysis.getFieldAccessType(builder.arena, builder.store, handle, rhs_loc.end, &tokenizer)) |result| {
+if (try builder.analyser.getFieldAccessType(handle, rhs_loc.end, &tokenizer)) |result| {
 const container_handle = result.unwrapped orelse result.original;
 switch (container_handle.type.data) {
 .other => |container_handle_node| {
-if (try analysis.lookupSymbolContainer(
-builder.arena,
-builder.store,
+if (try builder.analyser.lookupSymbolContainer(
 .{ .node = container_handle_node, .handle = container_handle.handle },
 tree.tokenSlice(rhsToken),
 true,
@@ -285,7 +283,7 @@ fn writeNodeInlayHint(
 pub fn writeRangeInlayHint(
 arena: std.mem.Allocator,
 config: Config,
-store: *DocumentStore,
+analyser: *Analyser,
 handle: *const DocumentStore.Handle,
 loc: offsets.Loc,
 hover_kind: types.MarkupKind,
@@ -295,7 +293,7 @@ pub fn writeRangeInlayHint(
 var builder: Builder = .{
 .arena = arena,
-.store = store,
+.analyser = analyser,
 .config = &config,
 .handle = handle,
 .hints = .{},

View File

@@ -1,7 +1,7 @@
 const std = @import("std");
 const Ast = std.zig.Ast;
 const DocumentStore = @import("DocumentStore.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const types = @import("lsp.zig");
 const offsets = @import("offsets.zig");
 const log = std.log.scoped(.zls_references);
@@ -9,7 +9,7 @@ const ast = @import("ast.zig");
 pub fn labelReferences(
 allocator: std.mem.Allocator,
-decl: analysis.DeclWithHandle,
+decl: Analyser.DeclWithHandle,
 encoding: offsets.Encoding,
 include_decl: bool,
 ) error{OutOfMemory}!std.ArrayListUnmanaged(types.Location) {
@@ -57,8 +57,8 @@ const Builder = struct {
 allocator: std.mem.Allocator,
 locations: std.ArrayListUnmanaged(types.Location) = .{},
 /// this is the declaration we are searching for
-decl_handle: analysis.DeclWithHandle,
-store: *DocumentStore,
+decl_handle: Analyser.DeclWithHandle,
+analyser: *Analyser,
 encoding: offsets.Encoding,
 const Context = struct {
@@ -98,12 +98,10 @@ const Builder = struct {
 .identifier,
 .test_decl,
 => {
-const identifier_token = analysis.getDeclNameToken(tree, node).?;
+const identifier_token = Analyser.getDeclNameToken(tree, node).?;
 if (token_tags[identifier_token] != .identifier) return;
-const child = (try analysis.lookupSymbolGlobal(
-builder.allocator,
-builder.store,
+const child = (try builder.analyser.lookupSymbolGlobal(
 handle,
 offsets.tokenToSlice(tree, identifier_token),
 starts[identifier_token],
@@ -114,18 +112,8 @@ const Builder = struct {
 }
 },
 .field_access => {
-var bound_type_params = analysis.BoundTypeParams{};
-defer bound_type_params.deinit(builder.store.allocator);
-const left_type = try analysis.resolveFieldAccessLhsType(
-builder.allocator,
-builder.store,
-(try analysis.resolveTypeOfNodeInternal(
-builder.allocator,
-builder.store,
-.{ .node = datas[node].lhs, .handle = handle },
-&bound_type_params,
-)) orelse return,
-&bound_type_params,
+const left_type = try builder.analyser.resolveFieldAccessLhsType(
+(try builder.analyser.resolveTypeOfNode(.{ .node = datas[node].lhs, .handle = handle })) orelse return,
 );
 const left_type_node = switch (left_type.type.data) {
@@ -133,9 +121,7 @@ const Builder = struct {
 else => return,
 };
-const child = (try analysis.lookupSymbolContainer(
-self.builder.allocator,
-builder.store,
+const child = (try builder.analyser.lookupSymbolContainer(
 .{ .node = left_type_node, .handle = left_type.handle },
 offsets.tokenToSlice(tree, datas[node].rhs),
 !left_type.type.is_type_val,
@@ -152,8 +138,8 @@ const Builder = struct {
 pub fn symbolReferences(
 allocator: std.mem.Allocator,
-store: *DocumentStore,
-decl_handle: analysis.DeclWithHandle,
+analyser: *Analyser,
+decl_handle: Analyser.DeclWithHandle,
 encoding: offsets.Encoding,
 /// add `decl_handle` as a references
 include_decl: bool,
@@ -166,7 +152,7 @@ pub fn symbolReferences(
 var builder = Builder{
 .allocator = allocator,
-.store = store,
+.analyser = analyser,
 .decl_handle = decl_handle,
 .encoding = encoding,
 };
@@ -194,7 +180,7 @@ pub fn symbolReferences(
 dependencies.deinit(allocator);
 }
-for (store.handles.values()) |handle| {
+for (analyser.store.handles.values()) |handle| {
 if (skip_std_references and std.mem.indexOf(u8, handle.uri, "std") != null) {
 if (!include_decl or !std.mem.eql(u8, handle.uri, curr_handle.uri))
 continue;
@@ -207,7 +193,7 @@ pub fn symbolReferences(
 }
 handle_dependencies.deinit(allocator);
 }
-try store.collectDependencies(allocator, handle.*, &handle_dependencies);
+try analyser.store.collectDependencies(allocator, handle.*, &handle_dependencies);
 try dependencies.ensureUnusedCapacity(allocator, handle_dependencies.items.len);
 for (handle_dependencies.items) |uri| {
@@ -217,7 +203,7 @@ pub fn symbolReferences(
 for (dependencies.keys()) |uri| {
 if (std.mem.eql(u8, uri, curr_handle.uri)) continue;
-const handle = store.getHandle(uri) orelse continue;
+const handle = analyser.store.getHandle(uri) orelse continue;
 try builder.collectReferences(handle, 0);
 }
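references.zig also shows why the extra *DocumentStore parameters could be dropped everywhere: the analyser exposes its store, so document access rides along with the analyser pointer. Both lines below are lifted verbatim from the hunks above:

    const handle = analyser.store.getHandle(uri) orelse continue;
    try analyser.store.collectDependencies(allocator, handle.*, &handle_dependencies);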

View File

@@ -2,7 +2,7 @@ const std = @import("std");
 const zig_builtin = @import("builtin");
 const offsets = @import("offsets.zig");
 const DocumentStore = @import("DocumentStore.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const Ast = std.zig.Ast;
 const ast = @import("ast.zig");
 const types = @import("lsp.zig");
@@ -40,7 +40,7 @@ pub const TokenModifiers = packed struct(u16) {
 const Builder = struct {
 arena: std.mem.Allocator,
-store: *DocumentStore,
+analyser: *Analyser,
 handle: *const DocumentStore.Handle,
 previous_source_index: usize = 0,
 previous_token: ?Ast.TokenIndex = null,
@@ -203,7 +203,7 @@ fn fieldTokenType(container_decl: Ast.Node.Index, handle: *const DocumentStore.H
 });
 }
-fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: Ast.TokenIndex, tok_mod: TokenModifiers) !void {
+fn colorIdentifierBasedOnType(builder: *Builder, type_node: Analyser.TypeWithHandle, target_tok: Ast.TokenIndex, tok_mod: TokenModifiers) !void {
 if (type_node.type.is_type_val) {
 var new_tok_mod = tok_mod;
 if (type_node.isNamespace())
@@ -300,7 +300,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 .aligned_var_decl,
 => {
 const var_decl = tree.fullVarDecl(node).?;
-if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
+if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
 try writeDocComments(builder, tree, comment_idx);
 try writeToken(builder, var_decl.visib_token, .keyword);
@@ -309,7 +309,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 try writeToken(builder, var_decl.comptime_token, .keyword);
 try writeToken(builder, var_decl.ast.mut_token, .keyword);
-if (try analysis.resolveTypeOfNode(allocator, builder.store, .{ .node = node, .handle = handle })) |decl_type| {
+if (try builder.analyser.resolveTypeOfNode(.{ .node = node, .handle = handle })) |decl_type| {
 try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, .{ .declaration = true });
 } else {
 try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true });
@@ -393,15 +393,13 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 if (std.mem.eql(u8, name, "_")) {
 return;
-} else if (analysis.isValueIdent(name)) {
+} else if (Analyser.isValueIdent(name)) {
 return try writeToken(builder, main_token, .keywordLiteral);
-} else if (analysis.isTypeIdent(name)) {
+} else if (Analyser.isTypeIdent(name)) {
 return try writeToken(builder, main_token, .type);
 }
-if (try analysis.lookupSymbolGlobal(
-allocator,
-builder.store,
+if (try builder.analyser.lookupSymbolGlobal(
 handle,
 name,
 tree.tokens.items(.start)[main_token],
@@ -409,10 +407,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 if (child.decl.* == .param_payload) {
 return try writeToken(builder, main_token, .parameter);
 }
-var bound_type_params = analysis.BoundTypeParams{};
-defer bound_type_params.deinit(builder.store.allocator);
-if (try child.resolveType(allocator, builder.store, &bound_type_params)) |decl_type| {
+if (try child.resolveType(builder.analyser)) |decl_type| {
 return try colorIdentifierBasedOnType(builder, decl_type, main_token, .{});
 }
 }
@@ -426,7 +421,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 => {
 var buf: [1]Ast.Node.Index = undefined;
 const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buf, node).?;
-if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
+if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |docs|
 try writeDocComments(builder, tree, docs);
 try writeToken(builder, fn_proto.visib_token, .keyword);
@@ -434,14 +429,14 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 try writeToken(builder, fn_proto.lib_name, .string);
 try writeToken(builder, fn_proto.ast.fn_token, .keyword);
-const func_name_tok_type: TokenType = if (analysis.isTypeFunction(tree, fn_proto))
+const func_name_tok_type: TokenType = if (Analyser.isTypeFunction(tree, fn_proto))
 .type
 else
 .function;
 const tok_mod = TokenModifiers{
 .declaration = true,
-.generic = analysis.isGenericFunction(tree, fn_proto),
+.generic = Analyser.isGenericFunction(tree, fn_proto),
 };
 try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod);
@@ -477,7 +472,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 .@"comptime",
 .@"nosuspend",
 => {
-if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
+if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |doc|
 try writeDocComments(builder, tree, doc);
 try writeToken(builder, main_token, .keyword);
 try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs });
@@ -617,9 +612,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 if (struct_init.ast.type_expr != 0) {
 try callWriteNodeTokens(allocator, .{ builder, struct_init.ast.type_expr });
-field_token_type = if (try analysis.resolveTypeOfNode(
-allocator,
-builder.store,
+field_token_type = if (try builder.analyser.resolveTypeOfNode(
 .{ .node = struct_init.ast.type_expr, .handle = handle },
 )) |struct_type| switch (struct_type.type.data) {
 .other => |type_node| if (ast.isContainer(struct_type.handle.tree, type_node))
@@ -775,7 +768,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 .asm_input,
 => unreachable,
 .test_decl => {
-if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
+if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |doc|
 try writeDocComments(builder, tree, doc);
 try writeToken(builder, main_token, .keyword);
@@ -869,27 +862,14 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
 // writeToken code.
 // Maybe we can hook into it instead? Also applies to Identifier and VarDecl
-var bound_type_params = analysis.BoundTypeParams{};
-defer bound_type_params.deinit(builder.store.allocator);
-const lhs_type = try analysis.resolveFieldAccessLhsType(
-allocator,
-builder.store,
-(try analysis.resolveTypeOfNodeInternal(
-allocator,
-builder.store,
-.{ .node = data.lhs, .handle = handle },
-&bound_type_params,
-)) orelse return,
-&bound_type_params,
+const lhs_type = try builder.analyser.resolveFieldAccessLhsType(
+(try builder.analyser.resolveTypeOfNode(.{ .node = data.lhs, .handle = handle })) orelse return,
 );
 const left_type_node = switch (lhs_type.type.data) {
 .other => |n| n,
 else => return,
 };
-if (try analysis.lookupSymbolContainer(
-allocator,
-builder.store,
+if (try builder.analyser.lookupSymbolContainer(
 .{ .node = left_type_node, .handle = lhs_type.handle },
 tree.tokenSlice(data.rhs),
 !lhs_type.type.is_type_val,
@@ -913,7 +893,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 else => {},
 }
-if (try decl_type.resolveType(allocator, builder.store, &bound_type_params)) |resolved_type| {
+if (try decl_type.resolveType(builder.analyser)) |resolved_type| {
 try colorIdentifierBasedOnType(builder, resolved_type, data.rhs, .{});
 }
 }
@@ -992,7 +972,7 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type
 var allocator = builder.arena;
-if (analysis.getDocCommentTokenIndex(tokens, base)) |docs|
+if (Analyser.getDocCommentTokenIndex(tokens, base)) |docs|
 try writeDocComments(builder, tree, docs);
 try writeToken(builder, container_field.comptime_token, .keyword);
@@ -1026,14 +1006,14 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type
 /// TODO edit version.
 pub fn writeSemanticTokens(
 arena: std.mem.Allocator,
-store: *DocumentStore,
+analyser: *Analyser,
 handle: *const DocumentStore.Handle,
 loc: ?offsets.Loc,
 encoding: offsets.Encoding,
 ) error{OutOfMemory}!types.SemanticTokens {
 var builder = Builder{
 .arena = arena,
-.store = store,
+.analyser = analyser,
 .handle = handle,
 .encoding = encoding,
 };

View File

@@ -1,5 +1,5 @@
 const std = @import("std");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const offsets = @import("offsets.zig");
 const DocumentStore = @import("DocumentStore.zig");
 const types = @import("lsp.zig");
@@ -8,14 +8,14 @@ const Token = std.zig.Token;
 const identifierFromPosition = @import("Server.zig").identifierFromPosition;
 const ast = @import("ast.zig");
-fn fnProtoToSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator, commas: u32, skip_self_param: bool, handle: *const DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation {
+fn fnProtoToSignatureInfo(analyser: *Analyser, alloc: std.mem.Allocator, commas: u32, skip_self_param: bool, handle: *const DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation {
     const tree = handle.tree;
     const token_starts = tree.tokens.items(.start);
-    const label = analysis.getFunctionSignature(tree, proto);
-    const proto_comments = (try analysis.getDocComments(alloc, tree, fn_node, .markdown)) orelse "";
+    const label = Analyser.getFunctionSignature(tree, proto);
+    const proto_comments = (try Analyser.getDocComments(alloc, tree, fn_node, .markdown)) orelse "";
     const arg_idx = if (skip_self_param) blk: {
-        const has_self_param = try analysis.hasSelfParam(alloc, document_store, handle, proto);
+        const has_self_param = try analyser.hasSelfParam(handle, proto);
         break :blk commas + @boolToInt(has_self_param);
     } else commas;
@@ -23,7 +23,7 @@ fn fnProtoToSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocat
     var param_it = proto.iterate(&tree);
     while (ast.nextFnParam(&param_it)) |param| {
         const param_comments = if (param.first_doc_comment) |dc|
-            try analysis.collectDocComments(alloc, tree, dc, .markdown, false)
+            try Analyser.collectDocComments(alloc, tree, dc, .markdown, false)
         else
             "";
@@ -70,8 +70,8 @@ fn fnProtoToSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocat
     };
 }
-pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator, handle: *const DocumentStore.Handle, absolute_index: usize, comptime data: type) !?types.SignatureInformation {
-    const innermost_block = analysis.innermostBlockScope(handle.*, absolute_index);
+pub fn getSignatureInfo(analyser: *Analyser, alloc: std.mem.Allocator, handle: *const DocumentStore.Handle, absolute_index: usize, comptime data: type) !?types.SignatureInformation {
+    const innermost_block = Analyser.innermostBlockScope(handle.*, absolute_index);
     const tree = handle.tree;
     const token_tags = tree.tokens.items(.tag);
     const token_starts = tree.tokens.items(.start);
@@ -256,9 +256,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator
             // Resolve the expression.
             var tokenizer = std.zig.Tokenizer.init(held_expr);
-            if (try analysis.getFieldAccessType(
-                alloc,
-                document_store,
+            if (try analyser.getFieldAccessType(
                 handle,
                 expr_start,
                 &tokenizer,
@@ -275,7 +273,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator
                 var buf: [1]Ast.Node.Index = undefined;
                 if (type_handle.handle.tree.fullFnProto(&buf, node)) |proto| {
                     return try fnProtoToSignatureInfo(
-                        document_store,
+                        analyser,
                         alloc,
                         paren_commas,
                         false,
@@ -292,9 +290,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator
                 }
                 const skip_self_param = !type_handle.type.is_type_val;
-                const decl_handle = (try analysis.lookupSymbolContainer(
-                    alloc,
-                    document_store,
+                const decl_handle = (try analyser.lookupSymbolContainer(
                     .{ .node = node, .handle = type_handle.handle },
                     name,
                     true,
@@ -311,9 +307,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator
                     },
                 };
-                if (try analysis.resolveVarDeclAlias(
-                    alloc,
-                    document_store,
+                if (try analyser.resolveVarDeclAlias(
                     .{ .node = node, .handle = decl_handle.handle },
                 )) |resolved| {
                     switch (resolved.decl.*) {
@@ -327,7 +321,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator
                     if (res_handle.tree.fullFnProto(&buf, node)) |proto| {
                         return try fnProtoToSignatureInfo(
-                            document_store,
+                            analyser,
                             alloc,
                             paren_commas,
                             skip_self_param,
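The public entry point changes shape the same way. A hedged sketch of an updated call, assuming a Server.zig call site that previously passed the document store (TokenData is a hypothetical stand-in for the comptime `data` argument, which is unchanged by this commit):

    const signature_info = try getSignatureInfo(
        &server.analyser,
        server.arena.allocator(),
        handle,
        source_index,
        TokenData, // hypothetical comptime data type, as before
    );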

View File

@@ -2,7 +2,7 @@
 //! zigbot9001 to take advantage of zls' tools
 pub const ast = @import("ast.zig");
-pub const analysis = @import("analysis.zig");
+pub const Analyser = @import("analysis.zig");
 pub const Header = @import("Header.zig");
 pub const debug = @import("debug.zig");
 pub const offsets = @import("offsets.zig");
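Because analysis.zig now declares fields at the top level, @import("analysis.zig") yields an instantiable struct type, so the re-export becomes a type rather than a namespace and downstream users just rename:

    const zls = @import("zls");
    const Analyser = zls.Analyser; // was: const analysis = zls.analysis;

Declarations without self state (getPositionContext, PositionContext, and the other static helpers) stay reachable through the type, which is why the test file below needs only the import swap.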

View File

@@ -1,7 +1,7 @@
 const std = @import("std");
 const zls = @import("zls");
-const analysis = zls.analysis;
+const Analyser = zls.Analyser;
 const types = zls.types;
 const offsets = zls.offsets;
@@ -510,12 +510,12 @@ test "position context - empty" {
     );
 }
-fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), maybe_range: ?[]const u8) !void {
+fn testContext(line: []const u8, tag: std.meta.Tag(Analyser.PositionContext), maybe_range: ?[]const u8) !void {
     const cursor_idx = std.mem.indexOf(u8, line, "<cursor>").?;
     const final_line = try std.mem.concat(allocator, u8, &.{ line[0..cursor_idx], line[cursor_idx + "<cursor>".len ..] });
     defer allocator.free(final_line);
-    const ctx = try analysis.getPositionContext(allocator, final_line, cursor_idx, true);
+    const ctx = try Analyser.getPositionContext(allocator, final_line, cursor_idx, true);
     if (std.meta.activeTag(ctx) != tag) {
         std.debug.print("Expected tag `{s}`, got `{s}`\n", .{ @tagName(tag), @tagName(std.meta.activeTag(ctx)) });