simplify analysis by introducing an Analyser struct
parent 973d33d435
commit 0ff0a193cc
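This commit replaces the module-level state in analysis.zig (see the TODO removed in `create` below, which pointed at issue #536) with an `Analyser` instance owned by the `Server`. From the call sites in this diff, the struct's public surface looks roughly like the sketch below. This is an assumption-level reconstruction — the real definition is in src/analysis.zig, whose diff is suppressed — and every name except `init`, `deinit`, `invalidate`, and `store` is illustrative:

const std = @import("std");
const DocumentStore = @import("DocumentStore.zig");

pub const Analyser = struct {
    gpa: std.mem.Allocator, // assumed field name
    arena: std.mem.Allocator, // assumed field name
    store: *DocumentStore, // observable below as `analyser.store`

    pub fn init(gpa: std.mem.Allocator, arena: std.mem.Allocator, store: *DocumentStore) Analyser {
        return .{ .gpa = gpa, .arena = arena, .store = store };
    }

    pub fn deinit(self: *Analyser) void {
        _ = self; // would free any cached analysis state
    }

    /// called on every document change; cached results refer to the
    /// old syntax tree and must be dropped
    pub fn invalidate(self: *Analyser) void {
        _ = self;
    }
};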
src/Server.zig (139 lines changed)
@@ -7,7 +7,7 @@ const Config = @import("Config.zig");
 const configuration = @import("configuration.zig");
 const DocumentStore = @import("DocumentStore.zig");
 const types = @import("lsp.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const ast = @import("ast.zig");
 const references = @import("references.zig");
 const offsets = @import("offsets.zig");
@@ -37,6 +37,7 @@ const log = std.log.scoped(.zls_server);
 config: *Config,
 allocator: std.mem.Allocator,
 arena: std.heap.ArenaAllocator,
+analyser: Analyser,
 document_store: DocumentStore,
 builtin_completions: ?std.ArrayListUnmanaged(types.CompletionItem),
 client_capabilities: ClientCapabilities = .{},
@@ -283,10 +284,10 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
 if (func.extern_export_inline_token != null) break :blk;

 if (func.name_token) |name_token| {
-const is_type_function = analysis.isTypeFunction(tree, func);
+const is_type_function = Analyser.isTypeFunction(tree, func);

 const func_name = tree.tokenSlice(name_token);
-if (!is_type_function and !analysis.isCamelCase(func_name)) {
+if (!is_type_function and !Analyser.isCamelCase(func_name)) {
 try diagnostics.append(allocator, .{
 .range = offsets.tokenToRange(tree, name_token, server.offset_encoding),
 .severity = .Hint,
@@ -294,7 +295,7 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
 .source = "zls",
 .message = "Functions should be camelCase",
 });
-} else if (is_type_function and !analysis.isPascalCase(func_name)) {
+} else if (is_type_function and !Analyser.isPascalCase(func_name)) {
 try diagnostics.append(allocator, .{
 .range = offsets.tokenToRange(tree, name_token, server.offset_encoding),
 .severity = .Hint,
@@ -512,7 +513,7 @@ pub fn autofix(server: *Server, allocator: std.mem.Allocator, handle: *const Doc

 var builder = code_actions.Builder{
 .arena = server.arena.allocator(),
-.document_store = &server.document_store,
+.analyser = &server.analyser,
 .handle = handle,
 .offset_encoding = server.offset_encoding,
 };
@@ -544,7 +545,7 @@ pub fn autofix(server: *Server, allocator: std.mem.Allocator, handle: *const Doc
 pub fn typeToCompletion(
 server: *Server,
 list: *std.ArrayListUnmanaged(types.CompletionItem),
-field_access: analysis.FieldAccessReturn,
+field_access: Analyser.FieldAccessReturn,
 orig_handle: *const DocumentStore.Handle,
 either_descriptor: ?[]const u8,
 ) error{OutOfMemory}!void {
@@ -620,8 +621,8 @@ pub fn typeToCompletion(
 pub fn nodeToCompletion(
 server: *Server,
 list: *std.ArrayListUnmanaged(types.CompletionItem),
-node_handle: analysis.NodeWithHandle,
-unwrapped: ?analysis.TypeWithHandle,
+node_handle: Analyser.NodeWithHandle,
+unwrapped: ?Analyser.TypeWithHandle,
 orig_handle: *const DocumentStore.Handle,
 is_type_val: bool,
 parent_is_type_val: ?bool,
@@ -645,7 +646,7 @@ pub fn nodeToCompletion(

 const Documentation = @TypeOf(@as(types.CompletionItem, undefined).documentation);

-const doc: Documentation = if (try analysis.getDocComments(
+const doc: Documentation = if (try Analyser.getDocComments(
 allocator,
 handle.tree,
 node,
@@ -672,9 +673,7 @@ pub fn nodeToCompletion(
 .parent_is_type_val = is_type_val,
 .either_descriptor = either_descriptor,
 };
-try analysis.iterateSymbolsContainer(
-allocator,
-&server.document_store,
+try server.analyser.iterateSymbolsContainer(
 node_handle,
 orig_handle,
 declToCompletion,
@@ -698,17 +697,17 @@ pub fn nodeToCompletion(
 const use_snippets = server.config.enable_snippets and server.client_capabilities.supports_snippets;
 const insert_text = if (use_snippets) blk: {
 const skip_self_param = !(parent_is_type_val orelse true) and
-try analysis.hasSelfParam(allocator, &server.document_store, handle, func);
-break :blk try analysis.getFunctionSnippet(server.arena.allocator(), tree, func, skip_self_param);
+try server.analyser.hasSelfParam(handle, func);
+break :blk try Analyser.getFunctionSnippet(server.arena.allocator(), tree, func, skip_self_param);
 } else tree.tokenSlice(func.name_token.?);

-const is_type_function = analysis.isTypeFunction(handle.tree, func);
+const is_type_function = Analyser.isTypeFunction(handle.tree, func);

 try list.append(allocator, .{
 .label = handle.tree.tokenSlice(name_token),
 .kind = if (is_type_function) .Struct else .Function,
 .documentation = doc,
-.detail = analysis.getFunctionSignature(handle.tree, func),
+.detail = Analyser.getFunctionSignature(handle.tree, func),
 .insertText = insert_text,
 .insertTextFormat = if (use_snippets) .Snippet else .PlainText,
 });
@@ -722,7 +721,7 @@ pub fn nodeToCompletion(
 const var_decl = tree.fullVarDecl(node).?;
 const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const;

-if (try analysis.resolveVarDeclAlias(allocator, &server.document_store, node_handle)) |result| {
+if (try server.analyser.resolveVarDeclAlias(node_handle)) |result| {
 const context = DeclToCompletionContext{
 .server = server,
 .completions = list,
@@ -736,7 +735,7 @@ pub fn nodeToCompletion(
 .label = handle.tree.tokenSlice(var_decl.ast.mut_token + 1),
 .kind = if (is_const) .Constant else .Variable,
 .documentation = doc,
-.detail = analysis.getVariableSignature(tree, var_decl),
+.detail = Analyser.getVariableSignature(tree, var_decl),
 .insertText = tree.tokenSlice(var_decl.ast.mut_token + 1),
 .insertTextFormat = .PlainText,
 });
@@ -750,7 +749,7 @@ pub fn nodeToCompletion(
 .label = handle.tree.tokenSlice(field.ast.main_token),
 .kind = if (field.ast.tuple_like) .Enum else .Field,
 .documentation = doc,
-.detail = analysis.getContainerFieldSignature(handle.tree, field),
+.detail = Analyser.getContainerFieldSignature(handle.tree, field),
 .insertText = tree.tokenSlice(field.ast.main_token),
 .insertTextFormat = .PlainText,
 });
@@ -825,7 +824,7 @@ pub fn nodeToCompletion(
 .insertTextFormat = .PlainText,
 });
 },
-else => if (analysis.nodeToString(tree, node)) |string| {
+else => if (Analyser.nodeToString(tree, node)) |string| {
 try list.append(allocator, .{
 .label = string,
 .kind = .Field,
@@ -842,12 +841,12 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []
 if (pos_index + 1 >= handle.text.len) return "";
 var start_idx = pos_index;

-while (start_idx > 0 and analysis.isSymbolChar(handle.text[start_idx - 1])) {
+while (start_idx > 0 and Analyser.isSymbolChar(handle.text[start_idx - 1])) {
 start_idx -= 1;
 }

 var end_idx = pos_index;
-while (end_idx < handle.text.len and analysis.isSymbolChar(handle.text[end_idx])) {
+while (end_idx < handle.text.len and Analyser.isSymbolChar(handle.text[end_idx])) {
 end_idx += 1;
 }

@@ -857,7 +856,7 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []

 pub fn gotoDefinitionSymbol(
 server: *Server,
-decl_handle: analysis.DeclWithHandle,
+decl_handle: Analyser.DeclWithHandle,
 resolve_alias: bool,
 ) error{OutOfMemory}!?types.Location {
 const tracy_zone = tracy.trace(@src());
@@ -868,14 +867,14 @@ pub fn gotoDefinitionSymbol(
 const name_token = switch (decl_handle.decl.*) {
 .ast_node => |node| block: {
 if (resolve_alias) {
-if (try analysis.resolveVarDeclAlias(server.arena.allocator(), &server.document_store, .{ .node = node, .handle = handle })) |result| {
+if (try server.analyser.resolveVarDeclAlias(.{ .node = node, .handle = handle })) |result| {
 handle = result.handle;

 break :block result.nameToken();
 }
 }

-break :block analysis.getDeclNameToken(handle.tree, node) orelse return null;
+break :block Analyser.getDeclNameToken(handle.tree, node) orelse return null;
 },
 else => decl_handle.nameToken(),
 };
@@ -886,7 +885,7 @@ pub fn gotoDefinitionSymbol(
 };
 }

-pub fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle, markup_kind: types.MarkupKind) error{OutOfMemory}!?[]const u8 {
+pub fn hoverSymbol(server: *Server, decl_handle: Analyser.DeclWithHandle, markup_kind: types.MarkupKind) error{OutOfMemory}!?[]const u8 {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

@@ -897,27 +896,27 @@ pub fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle, markup

 const def_str = switch (decl_handle.decl.*) {
 .ast_node => |node| def: {
-if (try analysis.resolveVarDeclAlias(server.arena.allocator(), &server.document_store, .{ .node = node, .handle = handle })) |result| {
+if (try server.analyser.resolveVarDeclAlias(.{ .node = node, .handle = handle })) |result| {
 return try server.hoverSymbol(result, markup_kind);
 }
-doc_str = try analysis.getDocComments(server.arena.allocator(), tree, node, markup_kind);
+doc_str = try Analyser.getDocComments(server.arena.allocator(), tree, node, markup_kind);

 var buf: [1]Ast.Node.Index = undefined;

 if (tree.fullVarDecl(node)) |var_decl| {
-break :def analysis.getVariableSignature(tree, var_decl);
+break :def Analyser.getVariableSignature(tree, var_decl);
 } else if (tree.fullFnProto(&buf, node)) |fn_proto| {
-break :def analysis.getFunctionSignature(tree, fn_proto);
+break :def Analyser.getFunctionSignature(tree, fn_proto);
 } else if (tree.fullContainerField(node)) |field| {
-break :def analysis.getContainerFieldSignature(tree, field);
+break :def Analyser.getContainerFieldSignature(tree, field);
 } else {
-break :def analysis.nodeToString(tree, node) orelse return null;
+break :def Analyser.nodeToString(tree, node) orelse return null;
 }
 },
 .param_payload => |pay| def: {
 const param = pay.param;
 if (param.first_doc_comment) |doc_comments| {
-doc_str = try analysis.collectDocComments(server.arena.allocator(), handle.tree, doc_comments, markup_kind, false);
+doc_str = try Analyser.collectDocComments(server.arena.allocator(), handle.tree, doc_comments, markup_kind, false);
 }

 const first_token = ast.paramFirstToken(tree, param);
@@ -936,9 +935,7 @@ pub fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle, markup
 => tree.tokenSlice(decl_handle.nameToken()),
 };

-var bound_type_params = analysis.BoundTypeParams{};
-defer bound_type_params.deinit(server.document_store.allocator);
-const resolved_type = try decl_handle.resolveType(server.arena.allocator(), &server.document_store, &bound_type_params);
+const resolved_type = try decl_handle.resolveType(&server.analyser);

 const resolved_type_str = if (resolved_type) |rt|
 if (rt.type.is_type_val) switch (rt.type.data) {
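A pattern worth calling out in the hunk above: before this change, every caller of `resolveType` had to allocate its own `BoundTypeParams` map, remember to `deinit` it, and thread a pointer to it through the call. After the change that bookkeeping disappears from the call site entirely; presumably the map now lives as state inside the `Analyser` instance (the analysis.zig diff that would confirm this is suppressed below).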
@@ -1002,28 +999,28 @@ pub fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle, markup
 return hover_text;
 }

-pub fn getLabelGlobal(pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?analysis.DeclWithHandle {
+pub fn getLabelGlobal(pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?Analyser.DeclWithHandle {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

 const name = identifierFromPosition(pos_index, handle.*);
 if (name.len == 0) return null;

-return try analysis.lookupLabel(handle, name, pos_index);
+return try Analyser.lookupLabel(handle, name, pos_index);
 }

 pub fn getSymbolGlobal(
 server: *Server,
 pos_index: usize,
 handle: *const DocumentStore.Handle,
-) error{OutOfMemory}!?analysis.DeclWithHandle {
+) error{OutOfMemory}!?Analyser.DeclWithHandle {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

 const name = identifierFromPosition(pos_index, handle.*);
 if (name.len == 0) return null;

-return try analysis.lookupSymbolGlobal(server.arena.allocator(), &server.document_store, handle, name, pos_index);
+return try server.analyser.lookupSymbolGlobal(handle, name, pos_index);
 }

 pub fn gotoDefinitionLabel(
@@ -1173,7 +1170,7 @@ pub fn getSymbolFieldAccesses(
 handle: *const DocumentStore.Handle,
 source_index: usize,
 loc: offsets.Loc,
-) error{OutOfMemory}!?[]const analysis.DeclWithHandle {
+) error{OutOfMemory}!?[]const Analyser.DeclWithHandle {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

@@ -1183,9 +1180,9 @@ pub fn getSymbolFieldAccesses(
 var held_range = try server.arena.allocator().dupeZ(u8, offsets.locToSlice(handle.text, loc));
 var tokenizer = std.zig.Tokenizer.init(held_range);

-var decls_with_handles = std.ArrayListUnmanaged(analysis.DeclWithHandle){};
+var decls_with_handles = std.ArrayListUnmanaged(Analyser.DeclWithHandle){};

-if (try analysis.getFieldAccessType(server.arena.allocator(), &server.document_store, handle, source_index, &tokenizer)) |result| {
+if (try server.analyser.getFieldAccessType(handle, source_index, &tokenizer)) |result| {
 const container_handle = result.unwrapped orelse result.original;

 const container_handle_nodes = try container_handle.getAllTypesWithHandles(server.arena.allocator());
@@ -1195,9 +1192,7 @@ pub fn getSymbolFieldAccesses(
 .other => |n| n,
 else => continue,
 };
-try decls_with_handles.append(server.arena.allocator(), (try analysis.lookupSymbolContainer(
-server.arena.allocator(),
-&server.document_store,
+try decls_with_handles.append(server.arena.allocator(), (try server.analyser.lookupSymbolContainer(
 .{ .node = container_handle_node, .handle = ty.handle },
 name,
 true,
@@ -1263,7 +1258,7 @@ pub fn hoverDefinitionFieldAccess(

 pub fn gotoDefinitionString(
 server: *Server,
-pos_context: analysis.PositionContext,
+pos_context: Analyser.PositionContext,
 handle: *const DocumentStore.Handle,
 ) error{OutOfMemory}!?types.Location {
 const tracy_zone = tracy.trace(@src());
@@ -1320,7 +1315,7 @@ const DeclToCompletionContext = struct {
 either_descriptor: ?[]const u8 = null,
 };

-pub fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.DeclWithHandle) error{OutOfMemory}!void {
+pub fn declToCompletion(context: DeclToCompletionContext, decl_handle: Analyser.DeclWithHandle) error{OutOfMemory}!void {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

@@ -1345,9 +1340,9 @@ pub fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.
 const doc: Documentation = if (param.first_doc_comment) |doc_comments| .{ .MarkupContent = types.MarkupContent{
 .kind = doc_kind,
 .value = if (context.either_descriptor) |ed|
-try std.fmt.allocPrint(allocator, "`Conditionally available: {s}`\n\n{s}", .{ ed, try analysis.collectDocComments(allocator, tree, doc_comments, doc_kind, false) })
+try std.fmt.allocPrint(allocator, "`Conditionally available: {s}`\n\n{s}", .{ ed, try Analyser.collectDocComments(allocator, tree, doc_comments, doc_kind, false) })
 else
-try analysis.collectDocComments(allocator, tree, doc_comments, doc_kind, false),
+try Analyser.collectDocComments(allocator, tree, doc_comments, doc_kind, false),
 } } else null;

 const first_token = ast.paramFirstToken(tree, param);
@@ -1406,7 +1401,7 @@ pub fn completeLabel(
 .completions = &completions,
 .orig_handle = handle,
 };
-try analysis.iterateLabels(handle, pos_index, declToCompletion, context);
+try Analyser.iterateLabels(handle, pos_index, declToCompletion, context);

 return completions.toOwnedSlice(server.arena.allocator());
 }
@@ -1490,7 +1485,7 @@ pub fn completeGlobal(server: *Server, pos_index: usize, handle: *const Document
 .completions = &completions,
 .orig_handle = handle,
 };
-try analysis.iterateSymbolsGlobal(server.arena.allocator(), &server.document_store, handle, pos_index, declToCompletion, context);
+try server.analyser.iterateSymbolsGlobal(handle, pos_index, declToCompletion, context);
 try populateSnippedCompletions(server.arena.allocator(), &completions, &snipped_data.generic, server.config.*, null);

 if (server.client_capabilities.label_details_support) {
@@ -1513,7 +1508,7 @@ pub fn completeFieldAccess(server: *Server, handle: *const DocumentStore.Handle,
 var held_loc = try allocator.dupeZ(u8, offsets.locToSlice(handle.text, loc));
 var tokenizer = std.zig.Tokenizer.init(held_loc);

-const result = (try analysis.getFieldAccessType(allocator, &server.document_store, handle, source_index, &tokenizer)) orelse return null;
+const result = (try server.analyser.getFieldAccessType(handle, source_index, &tokenizer)) orelse return null;
 try server.typeToCompletion(&completions, result, handle, null);
 if (server.client_capabilities.label_details_support) {
 for (completions.items) |*item| {
@@ -1739,9 +1734,9 @@ pub fn completeFileSystemStringLiteral(
 arena: std.mem.Allocator,
 store: DocumentStore,
 handle: DocumentStore.Handle,
-pos_context: analysis.PositionContext,
+pos_context: Analyser.PositionContext,
 ) ![]types.CompletionItem {
-var completions: analysis.CompletionSet = .{};
+var completions: Analyser.CompletionSet = .{};

 const loc = pos_context.loc().?;
 var completing = handle.tree.source[loc.start + 1 .. loc.end - 1];
@@ -2217,6 +2212,9 @@ fn changeDocumentHandler(server: *Server, notification: types.DidChangeTextDocum
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

+// whenever a document changes, any cached info is invalidated
+server.analyser.invalidate();
+
 const handle = server.document_store.getHandle(notification.textDocument.uri) orelse return;

 const new_text = try diff.applyContentChanges(server.allocator, handle.text, notification.contentChanges, server.offset_encoding);
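The `invalidate` call added above is the flip side of keeping lookup state in a long-lived struct: whatever the `Analyser` has memoized (per the TODO removed in `create` further down, presumably the former `using_trail`/`resolve_trail` globals and resolved type information) refers to the pre-edit syntax tree, so it has to be dropped before the changed document is re-analysed.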
@@ -2288,7 +2286,7 @@ fn semanticTokensFullHandler(server: *Server, request: types.SemanticTokensParam

 const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;

-return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.document_store, handle, null, server.offset_encoding);
+return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.analyser, handle, null, server.offset_encoding);
 }

 fn semanticTokensRangeHandler(server: *Server, request: types.SemanticTokensRangeParams) Error!?types.SemanticTokens {
@@ -2300,7 +2298,7 @@ fn semanticTokensRangeHandler(server: *Server, request: types.SemanticTokensRang
 const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
 const loc = offsets.rangeToLoc(handle.tree.source, request.range, server.offset_encoding);

-return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.document_store, handle, loc, server.offset_encoding);
+return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.analyser, handle, loc, server.offset_encoding);
 }

 pub fn completionHandler(server: *Server, request: types.CompletionParams) Error!?types.CompletionList {
@@ -2317,7 +2315,7 @@ pub fn completionHandler(server: *Server, request: types.CompletionParams) Error
 }

 const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
-const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, false);
+const pos_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, false);

 const maybe_completions = switch (pos_context) {
 .builtin => try server.completeBuiltin(),
@@ -2345,7 +2343,7 @@ pub fn completionHandler(server: *Server, request: types.CompletionParams) Error
 // The cursor is in the middle of a word or before a @, so we can replace
 // the remaining identifier with the completion instead of just inserting.
 // TODO Identify function call/struct init and replace the whole thing.
-const lookahead_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+const lookahead_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
 if (server.client_capabilities.supports_apply_edits and
 pos_context != .import_string_literal and
 pos_context != .cinclude_string_literal and
@@ -2403,7 +2401,7 @@ pub fn signatureHelpHandler(server: *Server, request: types.SignatureHelpParams)
 const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);

 const signature_info = (try getSignatureInfo(
-&server.document_store,
+&server.analyser,
 server.arena.allocator(),
 handle,
 source_index,
@@ -2429,7 +2427,7 @@ pub fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, r
 if (request.position.character == 0) return null;

 const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
-const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+const pos_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, true);

 return switch (pos_context) {
 .builtin => |loc| .{ .Location = (try server.gotoDefinitionBuiltin(handle, loc)) orelse return null },
@@ -2473,7 +2471,7 @@ pub fn hoverHandler(server: *Server, request: types.HoverParams) Error!?types.Ho
 if (request.position.character == 0) return null;

 const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
-const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+const pos_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, true);

 const response = switch (pos_context) {
 .builtin => try server.hoverDefinitionBuiltin(source_index, handle),
@@ -2636,7 +2634,7 @@ pub fn generalReferencesHandler(server: *Server, request: GeneralReferencesReque
 if (request.position().character <= 0) return null;

 const source_index = offsets.positionToIndex(handle.text, request.position(), server.offset_encoding);
-const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+const pos_context = try Analyser.getPositionContext(server.arena.allocator(), handle.text, source_index, true);

 // TODO: Make this work with branching types
 const decl = switch (pos_context) {
@@ -2663,7 +2661,7 @@ pub fn generalReferencesHandler(server: *Server, request: GeneralReferencesReque
 else
 try references.symbolReferences(
 allocator,
-&server.document_store,
+&server.analyser,
 decl,
 server.offset_encoding,
 include_decl,
@@ -2736,7 +2734,7 @@ fn inlayHintHandler(server: *Server, request: types.InlayHintParams) Error!?[]ty
 const hints = try inlay_hints.writeRangeInlayHint(
 server.arena.allocator(),
 server.config.*,
-&server.document_store,
+&server.analyser,
 handle,
 loc,
 hover_kind,
@@ -2786,7 +2784,7 @@ fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]

 var builder = code_actions.Builder{
 .arena = server.arena.allocator(),
-.document_store = &server.document_store,
+.analyser = &server.analyser,
 .handle = handle,
 .offset_encoding = server.offset_encoding,
 };
@@ -3203,16 +3201,12 @@ pub fn create(
 recording_enabled: bool,
 replay_enabled: bool,
 ) !*Server {
-// TODO replace global with something like an Analyser struct
-// which contains using_trail & resolve_trail and place it inside Server
-// see: https://github.com/zigtools/zls/issues/536
-analysis.init(allocator);
-
 const server = try allocator.create(Server);
 server.* = Server{
 .config = config,
 .runtime_zig_version = null,
 .allocator = allocator,
+.analyser = undefined,
 .arena = std.heap.ArenaAllocator.init(allocator),
 .document_store = .{
 .allocator = allocator,
@@ -3224,6 +3218,7 @@ pub fn create(
 .replay_enabled = replay_enabled,
 .status = .uninitialized,
 };
+server.analyser = Analyser.init(allocator, server.arena.allocator(), &server.document_store);

 try configuration.configChanged(config, &server.runtime_zig_version, allocator, config_path);

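One detail in `create` above deserves a note: the server is first written with `.analyser = undefined`, and `Analyser.init` is only called afterwards. The likely reason (a reading of the diff, not something it states) is that `init` receives `server.arena.allocator()` and `&server.document_store`, and those interior pointers are only stable once `server.*` has been stored to its final heap address. A minimal, self-contained Zig sketch of the same two-phase pattern, with hypothetical names:

const std = @import("std");

const Thing = struct {
    data: u32,
    helper: Helper,

    const Helper = struct {
        data_ptr: *u32, // points back into the enclosing Thing
    };

    fn create(gpa: std.mem.Allocator) !*Thing {
        const thing = try gpa.create(Thing);
        thing.* = .{ .data = 0, .helper = undefined };
        // two-phase init: &thing.data only has its final, stable address
        // once the struct literal above has been written to the allocation
        thing.helper = .{ .data_ptr = &thing.data };
        return thing;
    }
};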
@@ -3232,7 +3227,7 @@ pub fn create(

 pub fn destroy(server: *Server) void {
 server.document_store.deinit();
-analysis.deinit();
+server.analyser.deinit();

 if (server.builtin_completions) |*completions| completions.deinit(server.allocator);

src/analysis.zig (488 lines changed)
File diff suppressed because it is too large
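The 488-line analysis.zig diff is suppressed, but the call-site changes elsewhere imply its shape: free functions that used to take `(allocator, store, ...)` become methods on the new `Analyser` struct (for example, `analysis.lookupSymbolGlobal(arena, &store, handle, name, idx)` becomes `analyser.lookupSymbolGlobal(handle, name, idx)`), while purely syntactic helpers such as `isTypeFunction`, `getDocComments`, and `isCamelCase` remain plain namespaced functions reached through the `Analyser` import. A compilable toy illustration of that refactor pattern, assumed rather than taken from the suppressed diff:

const std = @import("std");

const Store = struct { items: []const u8 };

// before: shared dependencies threaded through every call
fn countBefore(gpa: std.mem.Allocator, store: *Store, needle: u8) usize {
    _ = gpa; // the real helpers also carried an allocator along
    return std.mem.count(u8, store.items, &[_]u8{needle});
}

// after: the dependencies live in one struct and helpers become methods
const Analyser = struct {
    gpa: std.mem.Allocator,
    store: *Store,

    fn count(self: *Analyser, needle: u8) usize {
        return std.mem.count(u8, self.store.items, &[_]u8{needle});
    }
};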
src/code_actions.zig
@@ -2,7 +2,7 @@ const std = @import("std");
 const Ast = std.zig.Ast;

 const DocumentStore = @import("DocumentStore.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const ast = @import("ast.zig");

 const types = @import("lsp.zig");
@@ -10,7 +10,7 @@ const offsets = @import("offsets.zig");

 pub const Builder = struct {
 arena: std.mem.Allocator,
-document_store: *DocumentStore,
+analyser: *Analyser,
 handle: *const DocumentStore.Handle,
 offset_encoding: offsets.Encoding,

@@ -88,9 +88,7 @@ fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnman

 const token_starts = tree.tokens.items(.start);

-const decl = (try analysis.lookupSymbolGlobal(
-builder.arena,
-builder.document_store,
+const decl = (try builder.analyser.lookupSymbolGlobal(
 builder.handle,
 identifier_name,
 loc.start,
@@ -134,9 +132,7 @@ fn handleUnusedVariableOrConstant(builder: *Builder, actions: *std.ArrayListUnma
 const token_tags = tree.tokens.items(.tag);
 const token_starts = tree.tokens.items(.start);

-const decl = (try analysis.lookupSymbolGlobal(
-builder.arena,
-builder.document_store,
+const decl = (try builder.analyser.lookupSymbolGlobal(
 builder.handle,
 identifier_name,
 loc.start,
src/inlay_hints.zig
@@ -1,7 +1,7 @@
 const std = @import("std");
 const zig_builtin = @import("builtin");
 const DocumentStore = @import("DocumentStore.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const types = @import("lsp.zig");
 const offsets = @import("offsets.zig");
 const tracy = @import("tracy.zig");
@@ -26,7 +26,7 @@ pub const InlayHint = struct {

 const Builder = struct {
 arena: std.mem.Allocator,
-store: *DocumentStore,
+analyser: *Analyser,
 config: *const Config,
 handle: *const DocumentStore.Handle,
 hints: std.ArrayListUnmanaged(InlayHint),
@@ -64,7 +64,7 @@ const Builder = struct {
 /// `call` is the function call
 /// `decl_handle` should be a function protototype
 /// writes parameter hints into `builder.hints`
-fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: analysis.DeclWithHandle) !void {
+fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: Analyser.DeclWithHandle) !void {
 const tracy_zone = tracy.trace(@src());
 defer tracy_zone.end();

@@ -85,7 +85,7 @@ fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: analysis.D
 var i: usize = 0;
 var it = fn_proto.iterate(&decl_tree);

-if (try analysis.hasSelfParam(builder.arena, builder.store, decl_handle.handle, fn_proto)) {
+if (try builder.analyser.hasSelfParam(decl_handle.handle, fn_proto)) {
 _ = ast.nextFnParam(&it);
 }

@@ -187,7 +187,7 @@ fn writeCallNodeHint(builder: *Builder, call: Ast.full.Call) !void {
 const source_index = offsets.tokenToIndex(tree, main_tokens[call.ast.fn_expr]);
 const name = offsets.tokenToSlice(tree, main_tokens[call.ast.fn_expr]);

-if (try analysis.lookupSymbolGlobal(builder.arena, builder.store, handle, name, source_index)) |decl_handle| {
+if (try builder.analyser.lookupSymbolGlobal(handle, name, source_index)) |decl_handle| {
 try writeCallHint(builder, call, decl_handle);
 }
 },
@@ -204,13 +204,11 @@ fn writeCallNodeHint(builder: *Builder, call: Ast.full.Call) !void {

 // note: we have the ast node, traversing it would probably yield better results
 // than trying to re-tokenize and re-parse it
-if (try analysis.getFieldAccessType(builder.arena, builder.store, handle, rhs_loc.end, &tokenizer)) |result| {
+if (try builder.analyser.getFieldAccessType(handle, rhs_loc.end, &tokenizer)) |result| {
 const container_handle = result.unwrapped orelse result.original;
 switch (container_handle.type.data) {
 .other => |container_handle_node| {
-if (try analysis.lookupSymbolContainer(
-builder.arena,
-builder.store,
+if (try builder.analyser.lookupSymbolContainer(
 .{ .node = container_handle_node, .handle = container_handle.handle },
 tree.tokenSlice(rhsToken),
 true,
@@ -285,7 +283,7 @@ fn writeNodeInlayHint(
 pub fn writeRangeInlayHint(
 arena: std.mem.Allocator,
 config: Config,
-store: *DocumentStore,
+analyser: *Analyser,
 handle: *const DocumentStore.Handle,
 loc: offsets.Loc,
 hover_kind: types.MarkupKind,
@@ -295,7 +293,7 @@ pub fn writeRangeInlayHint(

 var builder: Builder = .{
 .arena = arena,
-.store = store,
+.analyser = analyser,
 .config = &config,
 .handle = handle,
 .hints = .{},
src/references.zig
@@ -1,7 +1,7 @@
 const std = @import("std");
 const Ast = std.zig.Ast;
 const DocumentStore = @import("DocumentStore.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const types = @import("lsp.zig");
 const offsets = @import("offsets.zig");
 const log = std.log.scoped(.zls_references);
@@ -9,7 +9,7 @@ const ast = @import("ast.zig");

 pub fn labelReferences(
 allocator: std.mem.Allocator,
-decl: analysis.DeclWithHandle,
+decl: Analyser.DeclWithHandle,
 encoding: offsets.Encoding,
 include_decl: bool,
 ) error{OutOfMemory}!std.ArrayListUnmanaged(types.Location) {
@@ -57,8 +57,8 @@ const Builder = struct {
 allocator: std.mem.Allocator,
 locations: std.ArrayListUnmanaged(types.Location) = .{},
 /// this is the declaration we are searching for
-decl_handle: analysis.DeclWithHandle,
-store: *DocumentStore,
+decl_handle: Analyser.DeclWithHandle,
+analyser: *Analyser,
 encoding: offsets.Encoding,

 const Context = struct {
@@ -98,12 +98,10 @@ const Builder = struct {
 .identifier,
 .test_decl,
 => {
-const identifier_token = analysis.getDeclNameToken(tree, node).?;
+const identifier_token = Analyser.getDeclNameToken(tree, node).?;
 if (token_tags[identifier_token] != .identifier) return;

-const child = (try analysis.lookupSymbolGlobal(
-builder.allocator,
-builder.store,
+const child = (try builder.analyser.lookupSymbolGlobal(
 handle,
 offsets.tokenToSlice(tree, identifier_token),
 starts[identifier_token],
@@ -114,18 +112,8 @@ const Builder = struct {
 }
 },
 .field_access => {
-var bound_type_params = analysis.BoundTypeParams{};
-defer bound_type_params.deinit(builder.store.allocator);
-const left_type = try analysis.resolveFieldAccessLhsType(
-builder.allocator,
-builder.store,
-(try analysis.resolveTypeOfNodeInternal(
-builder.allocator,
-builder.store,
-.{ .node = datas[node].lhs, .handle = handle },
-&bound_type_params,
-)) orelse return,
-&bound_type_params,
+const left_type = try builder.analyser.resolveFieldAccessLhsType(
+(try builder.analyser.resolveTypeOfNode(.{ .node = datas[node].lhs, .handle = handle })) orelse return,
 );

 const left_type_node = switch (left_type.type.data) {
@@ -133,9 +121,7 @@ const Builder = struct {
 else => return,
 };

-const child = (try analysis.lookupSymbolContainer(
-self.builder.allocator,
-builder.store,
+const child = (try builder.analyser.lookupSymbolContainer(
 .{ .node = left_type_node, .handle = left_type.handle },
 offsets.tokenToSlice(tree, datas[node].rhs),
 !left_type.type.is_type_val,
@@ -152,8 +138,8 @@ const Builder = struct {

 pub fn symbolReferences(
 allocator: std.mem.Allocator,
-store: *DocumentStore,
-decl_handle: analysis.DeclWithHandle,
+analyser: *Analyser,
+decl_handle: Analyser.DeclWithHandle,
 encoding: offsets.Encoding,
 /// add `decl_handle` as a references
 include_decl: bool,
@@ -166,7 +152,7 @@ pub fn symbolReferences(

 var builder = Builder{
 .allocator = allocator,
-.store = store,
+.analyser = analyser,
 .decl_handle = decl_handle,
 .encoding = encoding,
 };
@@ -194,7 +180,7 @@ pub fn symbolReferences(
 dependencies.deinit(allocator);
 }

-for (store.handles.values()) |handle| {
+for (analyser.store.handles.values()) |handle| {
 if (skip_std_references and std.mem.indexOf(u8, handle.uri, "std") != null) {
 if (!include_decl or !std.mem.eql(u8, handle.uri, curr_handle.uri))
 continue;
@@ -207,7 +193,7 @@ pub fn symbolReferences(
 }
 handle_dependencies.deinit(allocator);
 }
-try store.collectDependencies(allocator, handle.*, &handle_dependencies);
+try analyser.store.collectDependencies(allocator, handle.*, &handle_dependencies);

 try dependencies.ensureUnusedCapacity(allocator, handle_dependencies.items.len);
 for (handle_dependencies.items) |uri| {
@@ -217,7 +203,7 @@ pub fn symbolReferences(

 for (dependencies.keys()) |uri| {
 if (std.mem.eql(u8, uri, curr_handle.uri)) continue;
-const handle = store.getHandle(uri) orelse continue;
+const handle = analyser.store.getHandle(uri) orelse continue;

 try builder.collectReferences(handle, 0);
 }
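Note how references.zig loses its direct `store: *DocumentStore` field yet `symbolReferences` still walks every open document: the `DocumentStore` is now reached through the analyser (`analyser.store.handles`, `analyser.store.collectDependencies(...)`, `analyser.store.getHandle(...)`), which tells us the new `Analyser` struct exposes its document store as a public `store` field.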
src/semantic_tokens.zig
@@ -2,7 +2,7 @@ const std = @import("std");
 const zig_builtin = @import("builtin");
 const offsets = @import("offsets.zig");
 const DocumentStore = @import("DocumentStore.zig");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const Ast = std.zig.Ast;
 const ast = @import("ast.zig");
 const types = @import("lsp.zig");
@@ -40,7 +40,7 @@ pub const TokenModifiers = packed struct(u16) {

 const Builder = struct {
 arena: std.mem.Allocator,
-store: *DocumentStore,
+analyser: *Analyser,
 handle: *const DocumentStore.Handle,
 previous_source_index: usize = 0,
 previous_token: ?Ast.TokenIndex = null,
@@ -203,7 +203,7 @@ fn fieldTokenType(container_decl: Ast.Node.Index, handle: *const DocumentStore.H
 });
 }

-fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: Ast.TokenIndex, tok_mod: TokenModifiers) !void {
+fn colorIdentifierBasedOnType(builder: *Builder, type_node: Analyser.TypeWithHandle, target_tok: Ast.TokenIndex, tok_mod: TokenModifiers) !void {
 if (type_node.type.is_type_val) {
 var new_tok_mod = tok_mod;
 if (type_node.isNamespace())
@@ -300,7 +300,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 .aligned_var_decl,
 => {
 const var_decl = tree.fullVarDecl(node).?;
-if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
+if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
 try writeDocComments(builder, tree, comment_idx);

 try writeToken(builder, var_decl.visib_token, .keyword);
@@ -309,7 +309,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 try writeToken(builder, var_decl.comptime_token, .keyword);
 try writeToken(builder, var_decl.ast.mut_token, .keyword);

-if (try analysis.resolveTypeOfNode(allocator, builder.store, .{ .node = node, .handle = handle })) |decl_type| {
+if (try builder.analyser.resolveTypeOfNode(.{ .node = node, .handle = handle })) |decl_type| {
 try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, .{ .declaration = true });
 } else {
 try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true });
@@ -393,15 +393,13 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v

 if (std.mem.eql(u8, name, "_")) {
 return;
-} else if (analysis.isValueIdent(name)) {
+} else if (Analyser.isValueIdent(name)) {
 return try writeToken(builder, main_token, .keywordLiteral);
-} else if (analysis.isTypeIdent(name)) {
+} else if (Analyser.isTypeIdent(name)) {
 return try writeToken(builder, main_token, .type);
 }

-if (try analysis.lookupSymbolGlobal(
-allocator,
-builder.store,
+if (try builder.analyser.lookupSymbolGlobal(
 handle,
 name,
 tree.tokens.items(.start)[main_token],
@@ -409,10 +407,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 if (child.decl.* == .param_payload) {
 return try writeToken(builder, main_token, .parameter);
 }
-var bound_type_params = analysis.BoundTypeParams{};
-defer bound_type_params.deinit(builder.store.allocator);
-
-if (try child.resolveType(allocator, builder.store, &bound_type_params)) |decl_type| {
+if (try child.resolveType(builder.analyser)) |decl_type| {
 return try colorIdentifierBasedOnType(builder, decl_type, main_token, .{});
 }
 }
@@ -426,7 +421,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 => {
 var buf: [1]Ast.Node.Index = undefined;
 const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buf, node).?;
-if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
+if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |docs|
 try writeDocComments(builder, tree, docs);

 try writeToken(builder, fn_proto.visib_token, .keyword);
@@ -434,14 +429,14 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 try writeToken(builder, fn_proto.lib_name, .string);
 try writeToken(builder, fn_proto.ast.fn_token, .keyword);

-const func_name_tok_type: TokenType = if (analysis.isTypeFunction(tree, fn_proto))
+const func_name_tok_type: TokenType = if (Analyser.isTypeFunction(tree, fn_proto))
 .type
 else
 .function;

 const tok_mod = TokenModifiers{
 .declaration = true,
-.generic = analysis.isGenericFunction(tree, fn_proto),
+.generic = Analyser.isGenericFunction(tree, fn_proto),
 };

 try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod);
@@ -477,7 +472,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 .@"comptime",
 .@"nosuspend",
 => {
-if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
+if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |doc|
 try writeDocComments(builder, tree, doc);
 try writeToken(builder, main_token, .keyword);
 try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs });
@@ -617,9 +612,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 if (struct_init.ast.type_expr != 0) {
 try callWriteNodeTokens(allocator, .{ builder, struct_init.ast.type_expr });

-field_token_type = if (try analysis.resolveTypeOfNode(
-allocator,
-builder.store,
+field_token_type = if (try builder.analyser.resolveTypeOfNode(
 .{ .node = struct_init.ast.type_expr, .handle = handle },
 )) |struct_type| switch (struct_type.type.data) {
 .other => |type_node| if (ast.isContainer(struct_type.handle.tree, type_node))
@@ -775,7 +768,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 .asm_input,
 => unreachable,
 .test_decl => {
-if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
+if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |doc|
 try writeDocComments(builder, tree, doc);

 try writeToken(builder, main_token, .keyword);
@@ -869,27 +862,14 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
 // writeToken code.
 // Maybe we can hook into it instead? Also applies to Identifier and VarDecl
-var bound_type_params = analysis.BoundTypeParams{};
-defer bound_type_params.deinit(builder.store.allocator);
-
-const lhs_type = try analysis.resolveFieldAccessLhsType(
-allocator,
-builder.store,
-(try analysis.resolveTypeOfNodeInternal(
-allocator,
-builder.store,
-.{ .node = data.lhs, .handle = handle },
-&bound_type_params,
-)) orelse return,
-&bound_type_params,
+const lhs_type = try builder.analyser.resolveFieldAccessLhsType(
+(try builder.analyser.resolveTypeOfNode(.{ .node = data.lhs, .handle = handle })) orelse return,
 );
 const left_type_node = switch (lhs_type.type.data) {
 .other => |n| n,
 else => return,
 };
-if (try analysis.lookupSymbolContainer(
-allocator,
-builder.store,
+if (try builder.analyser.lookupSymbolContainer(
 .{ .node = left_type_node, .handle = lhs_type.handle },
 tree.tokenSlice(data.rhs),
 !lhs_type.type.is_type_val,
@@ -913,7 +893,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 else => {},
 }

-if (try decl_type.resolveType(allocator, builder.store, &bound_type_params)) |resolved_type| {
+if (try decl_type.resolveType(builder.analyser)) |resolved_type| {
 try colorIdentifierBasedOnType(builder, resolved_type, data.rhs, .{});
 }
 }
@@ -992,7 +972,7 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type

 var allocator = builder.arena;

-if (analysis.getDocCommentTokenIndex(tokens, base)) |docs|
+if (Analyser.getDocCommentTokenIndex(tokens, base)) |docs|
 try writeDocComments(builder, tree, docs);

 try writeToken(builder, container_field.comptime_token, .keyword);
@@ -1026,14 +1006,14 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type
 /// TODO edit version.
 pub fn writeSemanticTokens(
 arena: std.mem.Allocator,
-store: *DocumentStore,
+analyser: *Analyser,
 handle: *const DocumentStore.Handle,
 loc: ?offsets.Loc,
 encoding: offsets.Encoding,
 ) error{OutOfMemory}!types.SemanticTokens {
 var builder = Builder{
 .arena = arena,
-.store = store,
+.analyser = analyser,
 .handle = handle,
 .encoding = encoding,
 };
src/signature_help.zig
@@ -1,5 +1,5 @@
 const std = @import("std");
-const analysis = @import("analysis.zig");
+const Analyser = @import("analysis.zig");
 const offsets = @import("offsets.zig");
 const DocumentStore = @import("DocumentStore.zig");
 const types = @import("lsp.zig");
@@ -8,14 +8,14 @@ const Token = std.zig.Token;
 const identifierFromPosition = @import("Server.zig").identifierFromPosition;
 const ast = @import("ast.zig");

-fn fnProtoToSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator, commas: u32, skip_self_param: bool, handle: *const DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation {
+fn fnProtoToSignatureInfo(analyser: *Analyser, alloc: std.mem.Allocator, commas: u32, skip_self_param: bool, handle: *const DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation {
 const tree = handle.tree;
 const token_starts = tree.tokens.items(.start);
-const label = analysis.getFunctionSignature(tree, proto);
-const proto_comments = (try analysis.getDocComments(alloc, tree, fn_node, .markdown)) orelse "";
+const label = Analyser.getFunctionSignature(tree, proto);
+const proto_comments = (try Analyser.getDocComments(alloc, tree, fn_node, .markdown)) orelse "";

 const arg_idx = if (skip_self_param) blk: {
-const has_self_param = try analysis.hasSelfParam(alloc, document_store, handle, proto);
+const has_self_param = try analyser.hasSelfParam(handle, proto);
 break :blk commas + @boolToInt(has_self_param);
 } else commas;

@@ -23,7 +23,7 @@ fn fnProtoToSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocat
 var param_it = proto.iterate(&tree);
 while (ast.nextFnParam(&param_it)) |param| {
 const param_comments = if (param.first_doc_comment) |dc|
-try analysis.collectDocComments(alloc, tree, dc, .markdown, false)
+try Analyser.collectDocComments(alloc, tree, dc, .markdown, false)
 else
 "";

@@ -70,8 +70,8 @@ fn fnProtoToSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocat
 };
 }

-pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator, handle: *const DocumentStore.Handle, absolute_index: usize, comptime data: type) !?types.SignatureInformation {
-const innermost_block = analysis.innermostBlockScope(handle.*, absolute_index);
+pub fn getSignatureInfo(analyser: *Analyser, alloc: std.mem.Allocator, handle: *const DocumentStore.Handle, absolute_index: usize, comptime data: type) !?types.SignatureInformation {
+const innermost_block = Analyser.innermostBlockScope(handle.*, absolute_index);
 const tree = handle.tree;
 const token_tags = tree.tokens.items(.tag);
 const token_starts = tree.tokens.items(.start);
@@ -256,9 +256,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator

 // Resolve the expression.
 var tokenizer = std.zig.Tokenizer.init(held_expr);
-if (try analysis.getFieldAccessType(
-alloc,
-document_store,
+if (try analyser.getFieldAccessType(
 handle,
 expr_start,
 &tokenizer,
@@ -275,7 +273,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator
 var buf: [1]Ast.Node.Index = undefined;
 if (type_handle.handle.tree.fullFnProto(&buf, node)) |proto| {
 return try fnProtoToSignatureInfo(
-document_store,
+analyser,
 alloc,
 paren_commas,
 false,
@@ -292,9 +290,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator
 }

 const skip_self_param = !type_handle.type.is_type_val;
-const decl_handle = (try analysis.lookupSymbolContainer(
-alloc,
-document_store,
+const decl_handle = (try analyser.lookupSymbolContainer(
 .{ .node = node, .handle = type_handle.handle },
 name,
 true,
@@ -311,9 +307,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator
 },
 };

-if (try analysis.resolveVarDeclAlias(
-alloc,
-document_store,
+if (try analyser.resolveVarDeclAlias(
 .{ .node = node, .handle = decl_handle.handle },
 )) |resolved| {
 switch (resolved.decl.*) {
@@ -327,7 +321,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, alloc: std.mem.Allocator

 if (res_handle.tree.fullFnProto(&buf, node)) |proto| {
 return try fnProtoToSignatureInfo(
-document_store,
+analyser,
 alloc,
 paren_commas,
 skip_self_param,
src/zls.zig
@@ -2,7 +2,7 @@
 //! zigbot9001 to take advantage of zls' tools

 pub const ast = @import("ast.zig");
-pub const analysis = @import("analysis.zig");
+pub const Analyser = @import("analysis.zig");
 pub const Header = @import("Header.zig");
 pub const debug = @import("debug.zig");
 pub const offsets = @import("offsets.zig");
tests/utility/position_context.zig
@@ -1,7 +1,7 @@
 const std = @import("std");
 const zls = @import("zls");

-const analysis = zls.analysis;
+const Analyser = zls.Analyser;
 const types = zls.types;
 const offsets = zls.offsets;

@@ -510,12 +510,12 @@ test "position context - empty" {
 );
 }

-fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), maybe_range: ?[]const u8) !void {
+fn testContext(line: []const u8, tag: std.meta.Tag(Analyser.PositionContext), maybe_range: ?[]const u8) !void {
 const cursor_idx = std.mem.indexOf(u8, line, "<cursor>").?;
 const final_line = try std.mem.concat(allocator, u8, &.{ line[0..cursor_idx], line[cursor_idx + "<cursor>".len ..] });
 defer allocator.free(final_line);

-const ctx = try analysis.getPositionContext(allocator, final_line, cursor_idx, true);
+const ctx = try Analyser.getPositionContext(allocator, final_line, cursor_idx, true);

 if (std.meta.activeTag(ctx) != tag) {
 std.debug.print("Expected tag `{s}`, got `{s}`\n", .{ @tagName(tag), @tagName(std.meta.activeTag(ctx)) });