From dc3199cffac83b3ea37641824e2d35ba34ca0dcd Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Tue, 23 Aug 2022 12:44:26 +0200 Subject: [PATCH] unmanage all the code --- src/DocumentStore.zig | 8 +- src/Server.zig | 111 +++++++++------- src/analysis.zig | 249 +++++++++++++++-------------------- src/inlay_hints.zig | 10 +- src/references.zig | 12 +- src/rename.zig | 14 +- src/semantic_tokens.zig | 16 ++- src/special/build_runner.zig | 39 +++--- src/translate_c.zig | 18 +-- src/types.zig | 2 +- src/uri.zig | 12 +- tests/context.zig | 6 +- 12 files changed, 239 insertions(+), 258 deletions(-) diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index 60530f5..ddd87b1 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -538,14 +538,12 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle { } fn collectImportUris(self: *DocumentStore, handle: *Handle) ![]const []const u8 { - const collected_imports = try analysis.collectImports(self.allocator, handle.tree); - - var imports = std.ArrayList([]const u8).fromOwnedSlice(self.allocator, collected_imports); + var imports = try analysis.collectImports(self.allocator, handle.tree); errdefer { for (imports.items) |imp| { self.allocator.free(imp); } - imports.deinit(); + imports.deinit(self.allocator); } // Convert to URIs @@ -559,7 +557,7 @@ fn collectImportUris(self: *DocumentStore, handle: *Handle) ![]const []const u8 _ = imports.swapRemove(i); } } - return imports.toOwnedSlice(); + return imports.toOwnedSlice(self.allocator); } pub const CImportSource = struct { diff --git a/src/Server.zig b/src/Server.zig index 7923767..7464eab 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -130,10 +130,10 @@ fn send(writer: anytype, allocator: std.mem.Allocator, reqOrRes: anytype) !void const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - var arr = std.ArrayList(u8).init(allocator); - defer arr.deinit(); + var arr = std.ArrayListUnmanaged(u8){}; + 
defer arr.deinit(allocator); - try std.json.stringify(reqOrRes, .{}, arr.writer()); + try std.json.stringify(reqOrRes, .{}, arr.writer(allocator)); try writer.print("Content-Length: {}\r\n\r\n", .{arr.items.len}); try writer.writeAll(arr.items); @@ -216,7 +216,8 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha const tree = handle.tree; - var diagnostics = std.ArrayList(types.Diagnostic).init(server.arena.allocator()); + var allocator = server.arena.allocator(); + var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){}; for (tree.errors) |err| { const loc = tree.tokenLocation(0, err.token); @@ -225,7 +226,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha var fbs = std.io.fixedBufferStream(&mem_buffer); try tree.renderError(err, fbs.writer()); - try diagnostics.append(.{ + try diagnostics.append(allocator, .{ .range = astLocationToRange(loc), .severity = .Error, .code = @tagName(err.tag), @@ -236,7 +237,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha } if (server.config.enable_unused_variable_warnings) { - scopes: for (handle.document_scope.scopes) |scope| { + scopes: for (handle.document_scope.scopes.items) |scope| { const scope_data = switch (scope.data) { .function => |f| b: { if (!ast.fnProtoHasBody(tree, f).?) 
continue :scopes; @@ -294,7 +295,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha } if (identifier_count <= 1) - try diagnostics.append(.{ + try diagnostics.append(allocator, .{ .range = astLocationToRange(tree.tokenLocation(0, name_token_index)), .severity = .Error, .code = "unused_variable", @@ -323,7 +324,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha const import_str = tree.tokenSlice(import_str_token); if (std.mem.startsWith(u8, import_str, "\".")) { - try diagnostics.append(.{ + try diagnostics.append(allocator, .{ .range = astLocationToRange(tree.tokenLocation(0, import_str_token)), .severity = .Hint, .code = "useless_dot", @@ -356,7 +357,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha const func_name = tree.tokenSlice(name_token); if (!is_type_function and !analysis.isCamelCase(func_name)) { - try diagnostics.append(.{ + try diagnostics.append(allocator, .{ .range = astLocationToRange(loc), .severity = .Hint, .code = "bad_style", @@ -364,7 +365,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha .message = "Functions should be camelCase", }); } else if (is_type_function and !analysis.isPascalCase(func_name)) { - try diagnostics.append(.{ + try diagnostics.append(allocator, .{ .range = astLocationToRange(loc), .severity = .Hint, .code = "bad_style", @@ -393,10 +394,12 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha fn typeToCompletion( server: *Server, - list: *std.ArrayList(types.CompletionItem), + list: *std.ArrayListUnmanaged(types.CompletionItem), field_access: analysis.FieldAccessReturn, orig_handle: *DocumentStore.Handle, ) error{OutOfMemory}!void { + var allocator = server.arena.allocator(); + const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); @@ -404,14 +407,14 @@ fn typeToCompletion( switch (type_handle.type.data) { .slice => { if 
(!type_handle.type.is_type_val) { - try list.append(.{ + try list.append(allocator, .{ .label = "len", .detail = "const len: usize", .kind = .Field, .insertText = "len", .insertTextFormat = .PlainText, }); - try list.append(.{ + try list.append(allocator, .{ .label = "ptr", .kind = .Field, .insertText = "ptr", @@ -422,7 +425,7 @@ fn typeToCompletion( .error_union => {}, .pointer => |n| { if (server.config.operator_completions) { - try list.append(.{ + try list.append(allocator, .{ .label = "*", .kind = .Operator, .insertText = "*", @@ -452,7 +455,7 @@ fn typeToCompletion( fn nodeToCompletion( server: *Server, - list: *std.ArrayList(types.CompletionItem), + list: *std.ArrayListUnmanaged(types.CompletionItem), node_handle: analysis.NodeWithHandle, unwrapped: ?analysis.TypeWithHandle, orig_handle: *DocumentStore.Handle, @@ -462,6 +465,8 @@ fn nodeToCompletion( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); + var allocator = server.arena.allocator(); + const node = node_handle.node; const handle = node_handle.handle; const tree = handle.tree; @@ -474,7 +479,7 @@ fn nodeToCompletion( .PlainText; const doc = if (try analysis.getDocComments( - list.allocator, + allocator, handle.tree, node, doc_kind, @@ -525,7 +530,7 @@ fn nodeToCompletion( const is_type_function = analysis.isTypeFunction(handle.tree, func); - try list.append(.{ + try list.append(allocator, .{ .label = handle.tree.tokenSlice(name_token), .kind = if (is_type_function) .Struct else .Function, .documentation = doc, @@ -552,7 +557,7 @@ fn nodeToCompletion( return try declToCompletion(context, result); } - try list.append(.{ + try list.append(allocator, .{ .label = handle.tree.tokenSlice(var_decl.ast.mut_token + 1), .kind = if (is_const) .Constant else .Variable, .documentation = doc, @@ -566,7 +571,7 @@ fn nodeToCompletion( .container_field_init, => { const field = ast.containerField(tree, node).?; - try list.append(.{ + try list.append(allocator, .{ .label = 
handle.tree.tokenSlice(field.ast.name_token), .kind = .Field, .documentation = doc, @@ -578,7 +583,7 @@ fn nodeToCompletion( .array_type, .array_type_sentinel, => { - try list.append(.{ + try list.append(allocator, .{ .label = "len", .detail = "const len: usize", .kind = .Field, @@ -595,7 +600,7 @@ fn nodeToCompletion( switch (ptr_type.size) { .One, .C, .Many => if (server.config.operator_completions) { - try list.append(.{ + try list.append(allocator, .{ .label = "*", .kind = .Operator, .insertText = "*", @@ -603,13 +608,13 @@ fn nodeToCompletion( }); }, .Slice => { - try list.append(.{ + try list.append(allocator, .{ .label = "ptr", .kind = .Field, .insertText = "ptr", .insertTextFormat = .PlainText, }); - try list.append(.{ + try list.append(allocator, .{ .label = "len", .detail = "const len: usize", .kind = .Field, @@ -627,7 +632,7 @@ fn nodeToCompletion( }, .optional_type => { if (server.config.operator_completions) { - try list.append(.{ + try list.append(allocator, .{ .label = "?", .kind = .Operator, .insertText = "?", @@ -637,7 +642,7 @@ fn nodeToCompletion( return; }, .string_literal => { - try list.append(.{ + try list.append(allocator, .{ .label = "len", .detail = "const len: usize", .kind = .Field, @@ -646,7 +651,7 @@ fn nodeToCompletion( }); }, else => if (analysis.nodeToString(tree, node)) |string| { - try list.append(.{ + try list.append(allocator, .{ .label = string, .kind = .Field, .documentation = doc, @@ -786,7 +791,7 @@ fn hoverSymbol( .label_decl => |label_decl| tree.tokenSlice(label_decl), }; - var bound_type_params = analysis.BoundTypeParams.init(server.arena.allocator()); + var bound_type_params = analysis.BoundTypeParams{}; const resolved_type = try decl_handle.resolveType(&server.document_store, &server.arena, &bound_type_params); const resolved_type_str = if (resolved_type) |rt| @@ -1066,7 +1071,7 @@ fn renameDefinitionGlobal( const decl = (try server.getSymbolGlobal(pos_index, handle)) orelse return try respondGeneric(writer, id, 
null_result_response); var workspace_edit = types.WorkspaceEdit{ - .changes = std.StringHashMap([]types.TextEdit).init(server.arena.allocator()), + .changes = std.StringHashMapUnmanaged([]types.TextEdit){}, }; try rename.renameSymbol(&server.arena, &server.document_store, decl, new_name, &workspace_edit.changes.?, server.offset_encoding); try send(writer, server.arena.allocator(), types.Response{ @@ -1090,7 +1095,7 @@ fn renameDefinitionFieldAccess( const decl = (try server.getSymbolFieldAccess(handle, position, range)) orelse return try respondGeneric(writer, id, null_result_response); var workspace_edit = types.WorkspaceEdit{ - .changes = std.StringHashMap([]types.TextEdit).init(server.arena.allocator()), + .changes = std.StringHashMapUnmanaged([]types.TextEdit){}, }; try rename.renameSymbol(&server.arena, &server.document_store, decl, new_name, &workspace_edit.changes.?, server.offset_encoding); try send(writer, server.arena.allocator(), types.Response{ @@ -1113,7 +1118,7 @@ fn renameDefinitionLabel( const decl = (try getLabelGlobal(pos_index, handle)) orelse return try respondGeneric(writer, id, null_result_response); var workspace_edit = types.WorkspaceEdit{ - .changes = std.StringHashMap([]types.TextEdit).init(server.arena.allocator()), + .changes = std.StringHashMapUnmanaged([]types.TextEdit){}, }; try rename.renameLabel(&server.arena, decl, new_name, &workspace_edit.changes.?, server.offset_encoding); try send(writer, server.arena.allocator(), types.Response{ @@ -1149,7 +1154,8 @@ fn referencesDefinitionGlobal( ); const result: types.ResponseParams = if (highlight) result: { - var highlights = try std.ArrayList(types.DocumentHighlight).initCapacity(server.arena.allocator(), locs.items.len); + var highlights = std.ArrayListUnmanaged(types.DocumentHighlight){}; + try highlights.ensureTotalCapacity(server.arena.allocator(), locs.items.len); const uri = handle.uri(); for (locs.items) |loc| { if (std.mem.eql(u8, loc.uri, uri)) { @@ -1181,8 +1187,10 @@ fn 
referencesDefinitionFieldAccess( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); + var allocator = server.arena.allocator(); + const decl = (try server.getSymbolFieldAccess(handle, position, range)) orelse return try respondGeneric(writer, id, null_result_response); - var locs = std.ArrayList(types.Location).init(server.arena.allocator()); + var locs = std.ArrayList(types.Location).init(allocator); try references.symbolReferences( &server.arena, &server.document_store, @@ -1195,7 +1203,8 @@ fn referencesDefinitionFieldAccess( !highlight, ); const result: types.ResponseParams = if (highlight) result: { - var highlights = try std.ArrayList(types.DocumentHighlight).initCapacity(server.arena.allocator(), locs.items.len); + var highlights = std.ArrayListUnmanaged(types.DocumentHighlight){}; + try highlights.ensureTotalCapacity(allocator, locs.items.len); const uri = handle.uri(); for (locs.items) |loc| { if (std.mem.eql(u8, loc.uri, uri)) { @@ -1207,7 +1216,7 @@ fn referencesDefinitionFieldAccess( } break :result .{ .DocumentHighlight = highlights.items }; } else .{ .Locations = locs.items }; - try send(writer, server.arena.allocator(), types.Response{ + try send(writer, allocator, types.Response{ .id = id, .result = result, }); @@ -1225,11 +1234,14 @@ fn referencesDefinitionLabel( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); + var allocator = server.arena.allocator(); + const decl = (try getLabelGlobal(pos_index, handle)) orelse return try respondGeneric(writer, id, null_result_response); - var locs = std.ArrayList(types.Location).init(server.arena.allocator()); + var locs = std.ArrayList(types.Location).init(allocator); try references.labelReferences(&server.arena, decl, server.offset_encoding, include_decl, &locs, std.ArrayList(types.Location).append); const result: types.ResponseParams = if (highlight) result: { - var highlights = try std.ArrayList(types.DocumentHighlight).initCapacity(server.arena.allocator(), locs.items.len); + 
var highlights = std.ArrayListUnmanaged(types.DocumentHighlight){}; + try highlights.ensureTotalCapacity(allocator, locs.items.len); const uri = handle.uri(); for (locs.items) |loc| { if (std.mem.eql(u8, loc.uri, uri)) { @@ -1241,7 +1253,8 @@ fn referencesDefinitionLabel( } break :result .{ .DocumentHighlight = highlights.items }; } else .{ .Locations = locs.items }; - try send(writer, server.arena.allocator(), types.Response{ + + try send(writer, allocator, types.Response{ .id = id, .result = result, }); @@ -1261,7 +1274,7 @@ fn hasComment(tree: Ast.Tree, start_token: Ast.TokenIndex, end_token: Ast.TokenI const DeclToCompletionContext = struct { server: *Server, - completions: *std.ArrayList(types.CompletionItem), + completions: *std.ArrayListUnmanaged(types.CompletionItem), orig_handle: *DocumentStore.Handle, parent_is_type_val: ?bool = null, }; @@ -1270,6 +1283,8 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); + var allocator = context.server.arena.allocator(); + const tree = decl_handle.handle.tree; switch (decl_handle.decl.*) { .ast_node => |node| try context.server.nodeToCompletion( @@ -1285,7 +1300,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl const doc = if (param.first_doc_comment) |doc_comments| types.MarkupContent{ .kind = doc_kind, - .value = try analysis.collectDocComments(context.server.arena.allocator(), tree, doc_comments, doc_kind, false), + .value = try analysis.collectDocComments(allocator, tree, doc_comments, doc_kind, false), } else null; @@ -1296,7 +1311,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl tree.firstToken(param.type_expr); const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr); - try context.completions.append(.{ + try context.completions.append(allocator, .{ .label = tree.tokenSlice(param.name_token.?), .kind = .Constant, 
.documentation = doc, @@ -1306,7 +1321,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl }); }, .pointer_payload => |payload| { - try context.completions.append(.{ + try context.completions.append(allocator, .{ .label = tree.tokenSlice(payload.name), .kind = .Variable, .insertText = tree.tokenSlice(payload.name), @@ -1314,7 +1329,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl }); }, .array_payload => |payload| { - try context.completions.append(.{ + try context.completions.append(allocator, .{ .label = tree.tokenSlice(payload.identifier), .kind = .Variable, .insertText = tree.tokenSlice(payload.identifier), @@ -1322,7 +1337,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl }); }, .array_index => |payload| { - try context.completions.append(.{ + try context.completions.append(allocator, .{ .label = tree.tokenSlice(payload), .kind = .Variable, .insertText = tree.tokenSlice(payload), @@ -1330,7 +1345,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl }); }, .switch_payload => |payload| { - try context.completions.append(.{ + try context.completions.append(allocator, .{ .label = tree.tokenSlice(payload.node), .kind = .Variable, .insertText = tree.tokenSlice(payload.node), @@ -1338,7 +1353,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl }); }, .label_decl => |label_decl| { - try context.completions.append(.{ + try context.completions.append(allocator, .{ .label = tree.tokenSlice(label_decl), .kind = .Variable, .insertText = tree.tokenSlice(label_decl), @@ -1358,7 +1373,7 @@ fn completeLabel( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - var completions = std.ArrayList(types.CompletionItem).init(server.arena.allocator()); + var completions = std.ArrayListUnmanaged(types.CompletionItem){}; const context = DeclToCompletionContext{ .server = server, @@ -1430,7 +1445,7 @@ fn 
completeGlobal(server: *Server, writer: anytype, id: types.RequestId, pos_ind const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - var completions = std.ArrayList(types.CompletionItem).init(server.arena.allocator()); + var completions = std.ArrayListUnmanaged(types.CompletionItem){}; const context = DeclToCompletionContext{ .server = server, @@ -1462,7 +1477,7 @@ fn completeFieldAccess(server: *Server, writer: anytype, id: types.RequestId, ha const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - var completions = std.ArrayList(types.CompletionItem).init(server.arena.allocator()); + var completions = std.ArrayListUnmanaged(types.CompletionItem){}; const line_mem_start = @ptrToInt(position.line.ptr) - @ptrToInt(handle.document.mem.ptr); var held_range = handle.document.borrowNullTerminatedSlice(line_mem_start + range.start, line_mem_start + range.end); diff --git a/src/analysis.zig b/src/analysis.zig index ea81bc0..c6e9a45 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -123,13 +123,13 @@ fn fmtSnippetPlaceholder(bytes: []const u8) SnippetPlaceholderFormatter { pub fn getFunctionSnippet(allocator: std.mem.Allocator, tree: Ast, func: Ast.full.FnProto, skip_self_param: bool) ![]const u8 { const name_index = func.name_token.?; - var buffer = std.ArrayList(u8).init(allocator); - try buffer.ensureTotalCapacity(128); + var buffer = std.ArrayListUnmanaged(u8){}; + try buffer.ensureTotalCapacity(allocator, 128); - try buffer.appendSlice(tree.tokenSlice(name_index)); - try buffer.append('('); + var buf_stream = buffer.writer(allocator); - var buf_stream = buffer.writer(); + try buf_stream.writeAll(tree.tokenSlice(name_index)); + try buf_stream.writeByte('('); const token_tags = tree.tokens.items(.tag); @@ -138,29 +138,29 @@ pub fn getFunctionSnippet(allocator: std.mem.Allocator, tree: Ast, func: Ast.ful while (it.next()) |param| : (i += 1) { if (skip_self_param and i == 0) continue; if (i != @boolToInt(skip_self_param)) - try 
buffer.appendSlice(", ${") + try buf_stream.writeAll(", ${") else - try buffer.appendSlice("${"); + try buf_stream.writeAll("${"); try buf_stream.print("{d}:", .{i + 1}); if (param.comptime_noalias) |token_index| { if (token_tags[token_index] == .keyword_comptime) - try buffer.appendSlice("comptime ") + try buf_stream.writeAll("comptime ") else - try buffer.appendSlice("noalias "); + try buf_stream.writeAll("noalias "); } if (param.name_token) |name_token| { try buf_stream.print("{}", .{fmtSnippetPlaceholder(tree.tokenSlice(name_token))}); - try buffer.appendSlice(": "); + try buf_stream.writeAll(": "); } if (param.anytype_ellipsis3) |token_index| { if (token_tags[token_index] == .keyword_anytype) - try buffer.appendSlice("anytype") + try buf_stream.writeAll("anytype") else - try buffer.appendSlice("..."); + try buf_stream.writeAll("..."); } else if (param.type_expr != 0) { var curr_token = tree.firstToken(param.type_expr); var end_token = ast.lastToken(tree, param.type_expr); @@ -170,15 +170,15 @@ pub fn getFunctionSnippet(allocator: std.mem.Allocator, tree: Ast, func: Ast.ful if (curr_token == end_token and is_comma) continue; try buf_stream.print("{}", .{fmtSnippetPlaceholder(tree.tokenSlice(curr_token))}); - if (is_comma or tag == .keyword_const) try buffer.append(' '); + if (is_comma or tag == .keyword_const) try buf_stream.writeByte(' '); } } else unreachable; - try buffer.append('}'); + try buf_stream.writeByte('}'); } - try buffer.append(')'); + try buf_stream.writeByte(')'); - return buffer.toOwnedSlice(); + return buffer.toOwnedSlice(allocator); } pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentStore, handle: *DocumentStore.Handle, func: Ast.full.FnProto) !bool { @@ -222,7 +222,7 @@ pub fn getVariableSignature(tree: Ast, var_decl: Ast.full.VarDecl) []const u8 { } pub fn getContainerFieldSignature(tree: Ast, field: Ast.full.ContainerField) []const u8 { - if(field.ast.value_expr == 0 and field.ast.type_expr == 0 and 
field.ast.align_expr == 0) { + if (field.ast.value_expr == 0 and field.ast.type_expr == 0 and field.ast.align_expr == 0) { return ""; // TODO display the container's type } const start = offsets.tokenLocation(tree, field.ast.name_token).start; @@ -579,7 +579,7 @@ pub fn resolveFieldAccessLhsType(store: *DocumentStore, arena: *std.heap.ArenaAl return (try resolveDerefType(store, arena, lhs, bound_type_params)) orelse lhs; } -pub const BoundTypeParams = std.AutoHashMap(Ast.full.FnProto.Param, TypeWithHandle); +pub const BoundTypeParams = std.AutoHashMapUnmanaged(Ast.full.FnProto.Param, TypeWithHandle); fn allDigits(str: []const u8) bool { for (str) |c| { @@ -737,7 +737,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl continue; if (!argument_type.type.is_type_val) continue; - _ = try bound_type_params.put(decl_param, argument_type); + try bound_type_params.put(arena.allocator(), decl_param, argument_type); } const has_body = decl.handle.tree.nodes.items(.tag)[decl_node] == .fn_decl; @@ -1082,13 +1082,13 @@ pub const TypeWithHandle = struct { }; pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator, node_handle: NodeWithHandle) error{OutOfMemory}!?TypeWithHandle { - var bound_type_params = BoundTypeParams.init(arena.allocator()); + var bound_type_params = BoundTypeParams{}; return resolveTypeOfNodeInternal(store, arena, node_handle, &bound_type_params); } /// Collects all `@import`'s we can find into a slice of import paths (without quotes). /// Caller owns returned memory. 
-pub fn collectImports(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}![][]const u8 { +pub fn collectImports(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}!std.ArrayListUnmanaged([]const u8) { var imports = std.ArrayListUnmanaged([]const u8){}; errdefer { for (imports.items) |imp| { @@ -1120,7 +1120,7 @@ pub fn collectImports(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory } } - return imports.toOwnedSlice(allocator); + return imports; } /// Collects all `@cImport` nodes @@ -1161,7 +1161,7 @@ pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator .handle = handle, }); - var bound_type_params = BoundTypeParams.init(arena.allocator()); + var bound_type_params = BoundTypeParams{}; while (true) { const tok = tokenizer.next(); @@ -1442,9 +1442,9 @@ const StackState = struct { stack_id: enum { Paren, Bracket, Global }, }; -fn peek(arr: *std.ArrayList(StackState)) !*StackState { +fn peek(allocator: std.mem.Allocator, arr: *std.ArrayListUnmanaged(StackState)) !*StackState { if (arr.items.len == 0) { - try arr.append(.{ .ctx = .empty, .stack_id = .Global }); + try arr.append(allocator, .{ .ctx = .empty, .stack_id = .Global }); } return &arr.items[arr.items.len - 1]; } @@ -1464,7 +1464,9 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. const line = doc_position.line; const line_mem_start = @ptrToInt(line.ptr) - @ptrToInt(document.mem.ptr); - var stack = try std.ArrayList(StackState).initCapacity(arena.allocator(), 8); + var stack = std.ArrayListUnmanaged(StackState){}; + try stack.ensureTotalCapacity(arena.allocator(), 8); + { var held_line = document.borrowNullTerminatedSlice( line_mem_start, @@ -1495,7 +1497,7 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. 
} // State changes - var curr_ctx = try peek(&stack); + var curr_ctx = try peek(arena.allocator(), &stack); switch (tok.tag) { .string_literal, .multiline_string_literal_line => string_lit_block: { if (curr_ctx.stack_id == .Paren and stack.items.len >= 2) { @@ -1551,18 +1553,18 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. .field_access => {}, else => curr_ctx.ctx = .empty, }, - .l_paren => try stack.append(.{ .ctx = .empty, .stack_id = .Paren }), - .l_bracket => try stack.append(.{ .ctx = .empty, .stack_id = .Bracket }), + .l_paren => try stack.append(arena.allocator(), .{ .ctx = .empty, .stack_id = .Paren }), + .l_bracket => try stack.append(arena.allocator(), .{ .ctx = .empty, .stack_id = .Bracket }), .r_paren => { _ = stack.pop(); if (curr_ctx.stack_id != .Paren) { - (try peek(&stack)).ctx = .empty; + (try peek(arena.allocator(), &stack)).ctx = .empty; } }, .r_bracket => { _ = stack.pop(); if (curr_ctx.stack_id != .Bracket) { - (try peek(&stack)).ctx = .empty; + (try peek(arena.allocator(), &stack)).ctx = .empty; } }, .keyword_error => curr_ctx.ctx = .global_error_set, @@ -1759,7 +1761,7 @@ const GetDocumentSymbolsContext = struct { .column = 0, .offset = 0, }, - symbols: *std.ArrayList(types.DocumentSymbol), + symbols: *std.ArrayListUnmanaged(types.DocumentSymbol), encoding: offsets.Encoding, }; @@ -1794,7 +1796,7 @@ fn getDocumentSymbolsInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast }; const tags = tree.nodes.items(.tag); - (try context.symbols.addOne()).* = .{ + (try context.symbols.addOne(allocator)).* = .{ .name = name, .kind = switch (tags[node]) { .fn_proto, @@ -1824,7 +1826,7 @@ fn getDocumentSymbolsInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast .selectionRange = range, .detail = "", .children = ch: { - var children = std.ArrayList(types.DocumentSymbol).init(allocator); + var children = std.ArrayListUnmanaged(types.DocumentSymbol){}; var child_context = GetDocumentSymbolsContext{ .prev_loc 
= start_loc, @@ -1858,7 +1860,8 @@ fn getDocumentSymbolsInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast } pub fn getDocumentSymbols(allocator: std.mem.Allocator, tree: Ast, encoding: offsets.Encoding) ![]types.DocumentSymbol { - var symbols = try std.ArrayList(types.DocumentSymbol).initCapacity(allocator, tree.rootDecls().len); + var symbols = std.ArrayListUnmanaged(types.DocumentSymbol){}; + try symbols.ensureTotalCapacity(allocator, tree.rootDecls().len); var context = GetDocumentSymbolsContext{ .symbols = &symbols, @@ -2022,7 +2025,7 @@ fn findContainerScope(container_handle: NodeWithHandle) ?*Scope { if (!ast.isContainer(handle.tree, container)) return null; // Find the container scope. - return for (handle.document_scope.scopes) |*scope| { + return for (handle.document_scope.scopes.items) |*scope| { switch (scope.data) { .container => |node| if (node == container) { break scope; @@ -2069,7 +2072,7 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena try callback(context, decl); } - for (container_scope.uses) |use| { + for (container_scope.uses.items) |use| { const use_token = tree.nodes.items(.main_token)[use]; const is_pub = use_token > 0 and token_tags[use_token - 1] == .keyword_pub; if (handle != orig_handle and !is_pub) continue; @@ -2105,7 +2108,7 @@ pub fn iterateSymbolsContainer(store: *DocumentStore, arena: *std.heap.ArenaAllo } pub fn iterateLabels(handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void { - for (handle.document_scope.scopes) |scope| { + for (handle.document_scope.scopes.items) |scope| { if (source_index >= scope.range.start and source_index < scope.range.end) { var decl_it = scope.decls.iterator(); while (decl_it.next()) |entry| { @@ -2121,7 +2124,7 @@ pub fn iterateLabels(handle: *DocumentStore.Handle, source_index: usize, comptim } fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: 
*DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype, use_trail: *std.ArrayList(Ast.Node.Index)) error{OutOfMemory}!void { - for (handle.document_scope.scopes) |scope| { + for (handle.document_scope.scopes.items) |scope| { if (source_index >= scope.range.start and source_index <= scope.range.end) { var decl_it = scope.decls.iterator(); while (decl_it.next()) |entry| { @@ -2131,7 +2134,7 @@ fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAll try callback(context, DeclWithHandle{ .decl = entry.value_ptr, .handle = handle }); } - for (scope.uses) |use| { + for (scope.uses.items) |use| { if (std.mem.indexOfScalar(Ast.Node.Index, use_trail.items, use) != null) continue; try use_trail.append(use); @@ -2167,10 +2170,10 @@ pub fn iterateSymbolsGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocat } pub fn innermostBlockScopeIndex(handle: DocumentStore.Handle, source_index: usize) usize { - if (handle.document_scope.scopes.len == 1) return 0; + if (handle.document_scope.scopes.items.len == 1) return 0; var current: usize = 0; - for (handle.document_scope.scopes[1..]) |*scope, idx| { + for (handle.document_scope.scopes.items[1..]) |*scope, idx| { if (source_index >= scope.range.start and source_index <= scope.range.end) { switch (scope.data) { .container, .function, .block => current = idx + 1, @@ -2183,14 +2186,14 @@ pub fn innermostBlockScopeIndex(handle: DocumentStore.Handle, source_index: usiz } pub fn innermostBlockScope(handle: DocumentStore.Handle, source_index: usize) Ast.Node.Index { - return handle.document_scope.scopes[innermostBlockScopeIndex(handle, source_index)].toNodeIndex().?; + return handle.document_scope.scopes.items[innermostBlockScopeIndex(handle, source_index)].toNodeIndex().?; } pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) TypeWithHandle { - var current = handle.document_scope.scopes[0].data.container; - if (handle.document_scope.scopes.len == 1) 
return TypeWithHandle.typeVal(.{ .node = current, .handle = handle }); + var current = handle.document_scope.scopes.items[0].data.container; + if (handle.document_scope.scopes.items.len == 1) return TypeWithHandle.typeVal(.{ .node = current, .handle = handle }); - for (handle.document_scope.scopes[1..]) |scope| { + for (handle.document_scope.scopes.items[1..]) |scope| { if (source_index >= scope.range.start and source_index <= scope.range.end) { switch (scope.data) { .container => |node| current = node, @@ -2235,7 +2238,7 @@ fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []co } pub fn lookupLabel(handle: *DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle { - for (handle.document_scope.scopes) |scope| { + for (handle.document_scope.scopes.items) |scope| { if (source_index >= scope.range.start and source_index < scope.range.end) { if (scope.decls.getEntry(symbol)) |candidate| { switch (candidate.value_ptr.*) { @@ -2258,7 +2261,7 @@ pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator var curr = innermost_scope_idx; while (curr >= 0) : (curr -= 1) { - const scope = &handle.document_scope.scopes[curr]; + const scope = &handle.document_scope.scopes.items[curr]; if (source_index >= scope.range.start and source_index <= scope.range.end) blk: { if (scope.decls.getEntry(symbol)) |candidate| { switch (candidate.value_ptr.*) { @@ -2273,7 +2276,7 @@ pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator .handle = handle, }; } - if (try resolveUse(store, arena, scope.uses, symbol, handle)) |result| return result; + if (try resolveUse(store, arena, scope.uses.items, symbol, handle)) |result| return result; } if (curr == 0) break; } @@ -2313,7 +2316,7 @@ pub fn lookupSymbolContainer( return DeclWithHandle{ .decl = candidate.value_ptr, .handle = handle }; } - if (try resolveUse(store, arena, container_scope.uses, symbol, handle)) |result| return 
result; + if (try resolveUse(store, arena, container_scope.uses.items, symbol, handle)) |result| return result; return null; } @@ -2344,12 +2347,12 @@ comptime { } pub const DocumentScope = struct { - scopes: []Scope, + scopes: std.ArrayListUnmanaged(Scope), error_completions: CompletionSet, enum_completions: CompletionSet, pub fn debugPrint(self: DocumentScope) void { - for (self.scopes) |scope| { + for (self.scopes.items) |scope| { log.debug( \\-------------------------- \\Scope {}, range: [{d}, {d}) @@ -2373,12 +2376,10 @@ pub const DocumentScope = struct { } pub fn deinit(self: *DocumentScope, allocator: std.mem.Allocator) void { - for (self.scopes) |*scope| { - scope.decls.deinit(); - allocator.free(scope.uses); - allocator.free(scope.tests); + for (self.scopes.items) |*scope| { + scope.deinit(allocator); } - allocator.free(self.scopes); + self.scopes.deinit(allocator); for (self.error_completions.entries.items(.key)) |item| { if (item.documentation) |doc| allocator.free(doc.value); } @@ -2399,12 +2400,17 @@ pub const Scope = struct { }; range: SourceRange, - decls: std.StringHashMap(Declaration), - tests: []const Ast.Node.Index = &.{}, - uses: []const Ast.Node.Index = &.{}, - + decls: std.StringHashMapUnmanaged(Declaration) = .{}, + tests: std.ArrayListUnmanaged(Ast.Node.Index) = .{}, + uses: std.ArrayListUnmanaged(Ast.Node.Index) = .{}, data: Data, + pub fn deinit(self: *Scope, allocator: std.mem.Allocator) void { + self.decls.deinit(allocator); + self.tests.deinit(allocator); + self.uses.deinit(allocator); + } + pub fn toNodeIndex(self: Scope) ?Ast.Node.Index { return switch (self.data) { .container, .function, .block => |idx| idx, @@ -2414,34 +2420,23 @@ pub const Scope = struct { }; pub fn makeDocumentScope(allocator: std.mem.Allocator, tree: Ast) !DocumentScope { - var scopes = std.ArrayListUnmanaged(Scope){}; - var error_completions = CompletionSet{}; - var enum_completions = CompletionSet{}; + var document_scope = DocumentScope{ + .scopes = .{}, + 
.error_completions = .{}, + .enum_completions = .{}, + }; + errdefer document_scope.deinit(allocator); - errdefer { - scopes.deinit(allocator); - for (error_completions.entries.items(.key)) |completion| { - if (completion.documentation) |doc| allocator.free(doc.value); - } - error_completions.deinit(allocator); - for (enum_completions.entries.items(.key)) |completion| { - if (completion.documentation) |doc| allocator.free(doc.value); - } - enum_completions.deinit(allocator); - } // pass root node index ('0') had_root = false; try makeScopeInternal(allocator, .{ - .scopes = &scopes, - .errors = &error_completions, - .enums = &enum_completions, + .scopes = &document_scope.scopes, + .errors = &document_scope.error_completions, + .enums = &document_scope.enum_completions, .tree = tree, }, 0); - return DocumentScope{ - .scopes = scopes.toOwnedSlice(allocator), - .error_completions = error_completions, - .enum_completions = enum_completions, - }; + + return document_scope; } fn nodeSourceRange(tree: Ast, node: Ast.Node.Index) SourceRange { @@ -2473,20 +2468,12 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: var buf: [2]Ast.Node.Index = undefined; const ast_decls = ast.declMembers(tree, node_idx, &buf); - (try scopes.addOne(allocator)).* = .{ + var scope = try scopes.addOne(allocator); + scope.* = .{ .range = nodeSourceRange(tree, node_idx), - .decls = std.StringHashMap(Declaration).init(allocator), .data = .{ .container = node_idx }, }; const scope_idx = scopes.items.len - 1; - var uses = std.ArrayListUnmanaged(Ast.Node.Index){}; - var tests = std.ArrayListUnmanaged(Ast.Node.Index){}; - - errdefer { - scopes.items[scope_idx].decls.deinit(); - uses.deinit(allocator); - tests.deinit(allocator); - } if (node_tag == .error_set_decl) { // All identifiers in main_token..data.lhs are error fields. 
@@ -2515,7 +2502,7 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: for (ast_decls) |decl| { if (tags[decl] == .@"usingnamespace") { - try uses.append(allocator, decl); + try scopes.items[scope_idx].uses.append(allocator, decl); continue; } @@ -2523,10 +2510,10 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: const name = getDeclName(tree, decl) orelse continue; if (tags[decl] == .test_decl) { - try tests.append(allocator, decl); + try scopes.items[scope_idx].tests.append(allocator, decl); continue; } - if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = decl })) |existing| { + if (try scopes.items[scope_idx].decls.fetchPut(allocator, name, .{ .ast_node = decl })) |existing| { _ = existing; // TODO Record a redefinition error. } @@ -2556,9 +2543,6 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: } } } - - scopes.items[scope_idx].tests = tests.toOwnedSlice(allocator); - scopes.items[scope_idx].uses = uses.toOwnedSlice(allocator); } // Whether we have already visited the root node. 
@@ -2610,19 +2594,18 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i var buf: [1]Ast.Node.Index = undefined; const func = ast.fnProto(tree, node_idx, &buf).?; - (try scopes.addOne(allocator)).* = .{ + try scopes.append(allocator, .{ .range = nodeSourceRange(tree, node_idx), - .decls = std.StringHashMap(Declaration).init(allocator), .data = .{ .function = node_idx }, - }; - var scope_idx = scopes.items.len - 1; - errdefer scopes.items[scope_idx].decls.deinit(); + }); + const scope_idx = scopes.items.len - 1; var it = func.iterate(&tree); while (it.next()) |param| { // Add parameter decls if (param.name_token) |name_token| { if (try scopes.items[scope_idx].decls.fetchPut( + allocator, tree.tokenSlice(name_token), .{ .param_decl = param }, )) |existing| { @@ -2665,46 +2648,36 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .start = offsets.tokenLocation(tree, main_tokens[node_idx]).start, .end = offsets.tokenLocation(tree, last_token).start, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); - try scope.decls.putNoClobber(tree.tokenSlice(first_token), .{ .label_decl = first_token }); + try scope.decls.putNoClobber(allocator, tree.tokenSlice(first_token), .{ .label_decl = first_token }); } - (try scopes.addOne(allocator)).* = .{ + try scopes.append(allocator, .{ .range = nodeSourceRange(tree, node_idx), - .decls = std.StringHashMap(Declaration).init(allocator), - .data = .{ .block = node_idx }, - }; - var scope_idx = scopes.items.len - 1; - var uses = std.ArrayList(Ast.Node.Index).init(allocator); - - errdefer { - scopes.items[scope_idx].decls.deinit(); - uses.deinit(); - } + .data = .{ .block = node_idx }, + }); + const scope_idx = scopes.items.len - 1; var buffer: [2]Ast.Node.Index = undefined; const statements = ast.blockStatements(tree, node_idx, &buffer).?; for (statements) |idx| { if (tags[idx] == .@"usingnamespace") { - try
uses.append(idx); + try scopes.items[scope_idx].uses.append(allocator, idx); continue; } try makeScopeInternal(allocator, context, idx); if (ast.varDecl(tree, idx)) |var_decl| { const name = tree.tokenSlice(var_decl.ast.mut_token + 1); - if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = idx })) |existing| { + if (try scopes.items[scope_idx].decls.fetchPut(allocator, name, .{ .ast_node = idx })) |existing| { _ = existing; // TODO record a redefinition error. } } } - scopes.items[scope_idx].uses = uses.toOwnedSlice(); return; }, .@"if", @@ -2719,16 +2692,14 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .start = offsets.tokenLocation(tree, payload).start, .end = offsets.tokenLocation(tree, ast.lastToken(tree, if_node.ast.then_expr)).end, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); std.debug.assert(token_tags[name_token] == .identifier); const name = tree.tokenSlice(name_token); - try scope.decls.putNoClobber(name, .{ + try scope.decls.putNoClobber(allocator, name, .{ .pointer_payload = .{ .name = name_token, .condition = if_node.ast.cond_expr, @@ -2747,13 +2718,11 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .start = offsets.tokenLocation(tree, err_token).start, .end = offsets.tokenLocation(tree, ast.lastToken(tree, if_node.ast.else_expr)).end, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); const name = tree.tokenSlice(err_token); - try scope.decls.putNoClobber(name, .{ .ast_node = if_node.ast.else_expr }); + try scope.decls.putNoClobber(allocator, name, .{ .ast_node = if_node.ast.else_expr }); } try makeScopeInternal(allocator, context, if_node.ast.else_expr); } @@ -2770,17 +2739,15 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i 
.start = offsets.tokenLocation(tree, tree.firstToken(catch_expr)).start, .end = offsets.tokenLocation(tree, ast.lastToken(tree, catch_expr)).end, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); if (token_tags.len > catch_token + 2 and token_tags[catch_token + 1] == .pipe and token_tags[catch_token + 2] == .identifier) { const name = tree.tokenSlice(catch_token + 2); - try scope.decls.putNoClobber(name, .{ .ast_node = catch_expr }); + try scope.decls.putNoClobber(allocator, name, .{ .ast_node = catch_expr }); } try makeScopeInternal(allocator, context, catch_expr); }, @@ -2801,12 +2768,10 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .start = offsets.tokenLocation(tree, while_node.ast.while_token).start, .end = offsets.tokenLocation(tree, ast.lastToken(tree, node_idx)).end, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); - try scope.decls.putNoClobber(tree.tokenSlice(label), .{ .label_decl = label }); + try scope.decls.putNoClobber(allocator, tree.tokenSlice(label), .{ .label_decl = label }); } if (while_node.payload_token) |payload| { @@ -2816,16 +2781,14 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .start = offsets.tokenLocation(tree, payload).start, .end = offsets.tokenLocation(tree, ast.lastToken(tree, while_node.ast.then_expr)).end, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); std.debug.assert(token_tags[name_token] == .identifier); const name = tree.tokenSlice(name_token); - try scope.decls.putNoClobber(name, if (is_for) .{ + try scope.decls.putNoClobber(allocator, name, if (is_for) .{ .array_payload = .{ .identifier = name_token, .array_expr = while_node.ast.cond_expr, @@ -2842,6 +2805,7 @@ fn 
makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i const index_token = name_token + 2; std.debug.assert(token_tags[index_token] == .identifier); if (try scope.decls.fetchPut( + allocator, tree.tokenSlice(index_token), .{ .array_index = index_token }, )) |existing| { @@ -2861,13 +2825,11 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .start = offsets.tokenLocation(tree, err_token).start, .end = offsets.tokenLocation(tree, ast.lastToken(tree, while_node.ast.else_expr)).end, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); const name = tree.tokenSlice(err_token); - try scope.decls.putNoClobber(name, .{ .ast_node = while_node.ast.else_expr }); + try scope.decls.putNoClobber(allocator, name, .{ .ast_node = while_node.ast.else_expr }); } try makeScopeInternal(allocator, context, while_node.ast.else_expr); } @@ -2893,16 +2855,14 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .start = offsets.tokenLocation(tree, payload).start, .end = offsets.tokenLocation(tree, ast.lastToken(tree, switch_case.ast.target_expr)).end, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); // if payload is *name than get next token const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); const name = tree.tokenSlice(name_token); - try scope.decls.putNoClobber(name, .{ + try scope.decls.putNoClobber(allocator, name, .{ .switch_payload = .{ .node = name_token, .switch_expr = cond, @@ -3057,13 +3017,11 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i .start = offsets.tokenLocation(tree, payload_token).start, .end = offsets.tokenLocation(tree, ast.lastToken(tree, expr)).end, }, - .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, }; - errdefer scope.decls.deinit(); const name = tree.tokenSlice(payload_token); - try scope.decls.putNoClobber(name, .{ .ast_node = expr }); + try scope.decls.putNoClobber(allocator, name, .{ .ast_node = expr }); } try makeScopeInternal(allocator, context, expr); diff --git a/src/inlay_hints.zig index 8215e26..8e34196 100644 --- a/src/inlay_hints.zig +++ b/src/inlay_hints.zig @@ -33,7 +33,7 @@ const Builder = struct { allocator: std.mem.Allocator, config: *const Config, handle: *DocumentStore.Handle, - hints: std.ArrayList(types.InlayHint), + hints: std.ArrayListUnmanaged(types.InlayHint), hover_kind: types.MarkupContent.Kind, fn init(allocator: std.mem.Allocator, config: *const Config, handle: *DocumentStore.Handle, hover_kind: types.MarkupContent.Kind) Builder { @@ -41,7 +41,7 @@ const Builder = struct { .allocator = allocator, .config = config, .handle = handle, - .hints = std.ArrayList(types.InlayHint).init(allocator), + .hints = std.ArrayListUnmanaged(types.InlayHint){}, .hover_kind = hover_kind, }; } @@ -50,7 +50,7 @@ const Builder = struct { for (self.hints.items) |hint| { self.allocator.free(hint.tooltip.value); } - self.hints.deinit(); + self.hints.deinit(self.allocator); } fn appendParameterHint(self: *Builder, position: Ast.Location, label: []const u8, tooltip: []const u8, tooltip_noalias: bool, tooltip_comptime: bool) !void { @@ -67,7 +67,7 @@ const Builder = struct { break :blk try std.fmt.allocPrint(self.allocator, "{s}{s}", .{ prefix, tooltip }); }; - try self.hints.append(.{ + try self.hints.append(self.allocator, .{ .position = .{ .line = @intCast(i64, position.line), .character = @intCast(i64, position.column), @@ -84,7 +84,7 @@ } fn toOwnedSlice(self: *Builder) []types.InlayHint { - return self.hints.toOwnedSlice(); + return self.hints.toOwnedSlice(self.allocator); } }; diff --git a/src/references.zig index 52507c2..050a8fe 100644 --- a/src/references.zig +++ b/src/references.zig @@
-393,7 +393,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); const rhs_str = tree.tokenSlice(datas[node].rhs); - var bound_type_params = analysis.BoundTypeParams.init(arena.allocator()); + var bound_type_params = analysis.BoundTypeParams{}; const left_type = try analysis.resolveFieldAccessLhsType( store, arena, @@ -481,7 +481,7 @@ pub fn symbolReferences(arena: *std.heap.ArenaAllocator, store: *DocumentStore, try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = curr_handle }, decl_handle, encoding, context, handler); if (workspace) { - var imports = std.ArrayList(*DocumentStore.Handle).init(arena.allocator()); + var imports = std.ArrayListUnmanaged(*DocumentStore.Handle){}; var handle_it = store.handles.iterator(); while (handle_it.next()) |entry| { @@ -491,7 +491,7 @@ pub fn symbolReferences(arena: *std.heap.ArenaAllocator, store: *DocumentStore, } // Check entry's transitive imports - try imports.append(entry.value_ptr.*); + try imports.append(arena.allocator(), entry.value_ptr.*); var i: usize = 0; blk: while (i < imports.items.len) : (i += 1) { const import = imports.items[i]; @@ -511,17 +511,17 @@ pub fn symbolReferences(arena: *std.heap.ArenaAllocator, store: *DocumentStore, break :select; } } - try imports.append(h); + try imports.append(arena.allocator(), h); } } } - try imports.resize(0); + try imports.resize(arena.allocator(), 0); } } }, .param_decl => |param| { // Rename the param tok. 
- const fn_node: Ast.full.FnProto = loop: for (curr_handle.document_scope.scopes) |scope| { + const fn_node: Ast.full.FnProto = loop: for (curr_handle.document_scope.scopes.items) |scope| { switch (scope.data) { .function => |proto| { var buf: [1]Ast.Node.Index = undefined; diff --git a/src/rename.zig index 8250de7..43ace34 100644 --- a/src/rename.zig +++ b/src/rename.zig @@ -7,25 +7,25 @@ const offsets = @import("offsets.zig"); // TODO Use a map to array lists and collect at the end instead? const RefHandlerContext = struct { - edits: *std.StringHashMap([]types.TextEdit), allocator: std.mem.Allocator, + edits: *std.StringHashMapUnmanaged([]types.TextEdit), new_name: []const u8, }; fn refHandler(context: RefHandlerContext, loc: types.Location) !void { var text_edits = if (context.edits.get(loc.uri)) |slice| - std.ArrayList(types.TextEdit).fromOwnedSlice(context.allocator, slice) + std.ArrayListUnmanaged(types.TextEdit){ .items = slice, .capacity = slice.len } else - std.ArrayList(types.TextEdit).init(context.allocator); + std.ArrayListUnmanaged(types.TextEdit){}; - (try text_edits.addOne()).* = .{ + (try text_edits.addOne(context.allocator)).* = .{ .range = loc.range, .newText = context.new_name, }; - try context.edits.put(loc.uri, text_edits.toOwnedSlice()); + try context.edits.put(context.allocator, loc.uri, text_edits.toOwnedSlice(context.allocator)); } -pub fn renameSymbol(arena: *std.heap.ArenaAllocator, store: *DocumentStore, decl_handle: analysis.DeclWithHandle, new_name: []const u8, edits: *std.StringHashMap([]types.TextEdit), encoding: offsets.Encoding) !void { +pub fn renameSymbol(arena: *std.heap.ArenaAllocator, store: *DocumentStore, decl_handle: analysis.DeclWithHandle, new_name: []const u8, edits: *std.StringHashMapUnmanaged([]types.TextEdit), encoding: offsets.Encoding) !void { std.debug.assert(decl_handle.decl.* != .label_decl); try references.symbolReferences(arena, store, decl_handle, encoding, true, RefHandlerContext{ .edits = edits, @@ -34,7 +34,7 @@
pub fn renameSymbol(arena: *std.heap.ArenaAllocator, store: *DocumentStore, decl }, refHandler, true, true); } -pub fn renameLabel(arena: *std.heap.ArenaAllocator, decl_handle: analysis.DeclWithHandle, new_name: []const u8, edits: *std.StringHashMap([]types.TextEdit), encoding: offsets.Encoding) !void { +pub fn renameLabel(arena: *std.heap.ArenaAllocator, decl_handle: analysis.DeclWithHandle, new_name: []const u8, edits: *std.StringHashMapUnmanaged([]types.TextEdit), encoding: offsets.Encoding) !void { std.debug.assert(decl_handle.decl.* == .label_decl); try references.labelReferences(arena, decl_handle, encoding, true, RefHandlerContext{ .edits = edits, diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 384beaa..d35e38c 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -51,16 +51,18 @@ pub const TokenModifiers = packed struct { }; const Builder = struct { + allocator: std.mem.Allocator, handle: *DocumentStore.Handle, previous_position: usize = 0, previous_token: ?Ast.TokenIndex = null, - arr: std.ArrayList(u32), + arr: std.ArrayListUnmanaged(u32), encoding: offsets.Encoding, fn init(allocator: std.mem.Allocator, handle: *DocumentStore.Handle, encoding: offsets.Encoding) Builder { return Builder{ + .allocator = allocator, .handle = handle, - .arr = std.ArrayList(u32).init(allocator), + .arr = std.ArrayListUnmanaged(u32){}, .encoding = encoding, }; } @@ -185,7 +187,7 @@ const Builder = struct { self.encoding, ) catch return; - try self.arr.appendSlice(&.{ + try self.arr.appendSlice(self.allocator, &.{ @truncate(u32, delta.line), @truncate(u32, delta.column), @truncate(u32, length), @@ -196,7 +198,7 @@ const Builder = struct { } fn toOwnedSlice(self: *Builder) []u32 { - return self.arr.toOwnedSlice(); + return self.arr.toOwnedSlice(self.allocator); } }; @@ -423,7 +425,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D if (child.decl.* == .param_decl) { return try writeToken(builder, main_token, 
.parameter); } - var bound_type_params = analysis.BoundTypeParams.init(arena.allocator()); + var bound_type_params = analysis.BoundTypeParams{}; if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { try colorIdentifierBasedOnType(builder, decl_type, main_token, .{}); } else { @@ -859,7 +861,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added // writeToken code. // Maybe we can hook into it insead? Also applies to Identifier and VarDecl - var bound_type_params = analysis.BoundTypeParams.init(arena.allocator()); + var bound_type_params = analysis.BoundTypeParams{}; const lhs_type = try analysis.resolveFieldAccessLhsType( store, arena, @@ -1008,7 +1010,7 @@ fn writeContainerField(builder: *Builder, arena: *std.heap.ArenaAllocator, store // TODO Range version, edit version. pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 { var builder = Builder.init(arena.child_allocator, handle, encoding); - errdefer builder.arr.deinit(); + errdefer builder.arr.deinit(arena.child_allocator); // reverse the ast from the root declarations var buf: [2]Ast.Node.Index = undefined; diff --git a/src/special/build_runner.zig b/src/special/build_runner.zig index 0245b15..98ce81f 100644 --- a/src/special/build_runner.zig +++ b/src/special/build_runner.zig @@ -65,18 +65,18 @@ pub fn main() !void { builder.resolveInstallPrefix(null, Builder.DirList{}); try runBuild(builder); - var packages = std.ArrayList(BuildConfig.Pkg).init(allocator); - defer packages.deinit(); + var packages = std.ArrayListUnmanaged(BuildConfig.Pkg){}; + defer packages.deinit(allocator); - var include_dirs = std.ArrayList(BuildConfig.IncludeDir).init(allocator); - defer include_dirs.deinit(); + var include_dirs = std.ArrayListUnmanaged(BuildConfig.IncludeDir){}; + 
defer include_dirs.deinit(allocator); // TODO: We currently add packages from every LibExeObj step that the install step depends on. // Should we error out or keep one step or something similar? // We also flatten them, we should probably keep the nested structure. for (builder.top_level_steps.items) |tls| { for (tls.step.dependencies.items) |step| { - try processStep(&packages, &include_dirs, step); + try processStep(allocator, &packages, &include_dirs, step); } } @@ -91,29 +91,31 @@ pub fn main() !void { } fn processStep( - packages: *std.ArrayList(BuildConfig.Pkg), - include_dirs: *std.ArrayList(BuildConfig.IncludeDir), + allocator: std.mem.Allocator, + packages: *std.ArrayListUnmanaged(BuildConfig.Pkg), + include_dirs: *std.ArrayListUnmanaged(BuildConfig.IncludeDir), step: *std.build.Step, ) anyerror!void { if (step.cast(InstallArtifactStep)) |install_exe| { - try processIncludeDirs(include_dirs, install_exe.artifact.include_dirs.items); + try processIncludeDirs(allocator, include_dirs, install_exe.artifact.include_dirs.items); for (install_exe.artifact.packages.items) |pkg| { - try processPackage(packages, pkg); + try processPackage(allocator, packages, pkg); } } else if (step.cast(LibExeObjStep)) |exe| { - try processIncludeDirs(include_dirs, exe.include_dirs.items); + try processIncludeDirs(allocator, include_dirs, exe.include_dirs.items); for (exe.packages.items) |pkg| { - try processPackage(packages, pkg); + try processPackage(allocator, packages, pkg); } } else { for (step.dependencies.items) |unknown_step| { - try processStep(packages, include_dirs, unknown_step); + try processStep(allocator, packages, include_dirs, unknown_step); } } } fn processPackage( - packages: *std.ArrayList(BuildConfig.Pkg), + allocator: std.mem.Allocator, + packages: *std.ArrayListUnmanaged(BuildConfig.Pkg), pkg: std.build.Pkg, ) anyerror!void { for (packages.items) |package| { @@ -127,20 +129,23 @@ fn processPackage( }; if (maybe_path) |path| { - try packages.append(.{ .name = 
pkg.name, .path = path }); + try packages.append(allocator, .{ .name = pkg.name, .path = path }); } if (pkg.dependencies) |dependencies| { for (dependencies) |dep| { - try processPackage(packages, dep); + try processPackage(allocator, packages, dep); } } } fn processIncludeDirs( - include_dirs: *std.ArrayList(BuildConfig.IncludeDir), + allocator: std.mem.Allocator, + include_dirs: *std.ArrayListUnmanaged(BuildConfig.IncludeDir), dirs: []std.build.LibExeObjStep.IncludeDir, ) !void { + try include_dirs.ensureUnusedCapacity(allocator, dirs.len); + outer: for (dirs) |dir| { const candidate: BuildConfig.IncludeDir = switch (dir) { .raw_path => |path| .{ .path = path, .system = false }, @@ -152,7 +157,7 @@ fn processIncludeDirs( if (std.mem.eql(u8, candidate.path, include_dir.path)) continue :outer; } - try include_dirs.append(candidate); + include_dirs.appendAssumeCapacity(candidate); } } diff --git a/src/translate_c.zig b/src/translate_c.zig index 49d63b3..f9b51ec 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -27,8 +27,8 @@ pub fn convertCInclude(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.I std.debug.assert(ast.isBuiltinCall(tree, node)); std.debug.assert(std.mem.eql(u8, Ast.tokenSlice(tree, main_tokens[node]), "@cImport")); - var output = std.ArrayList(u8).init(allocator); - errdefer output.deinit(); + var output = std.ArrayListUnmanaged(u8){}; + errdefer output.deinit(allocator); var stack_allocator = std.heap.stackFallback(512, allocator); @@ -37,13 +37,15 @@ pub fn convertCInclude(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.I try convertCIncludeInternal(stack_allocator.get(), tree, child, &output); } - return output.toOwnedSlice(); + return output.toOwnedSlice(allocator); } -fn convertCIncludeInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index, output: *std.ArrayList(u8)) error{ OutOfMemory, Unsupported }!void { +fn convertCIncludeInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index, 
output: *std.ArrayListUnmanaged(u8)) error{ OutOfMemory, Unsupported }!void { const node_tags = tree.nodes.items(.tag); const main_tokens = tree.nodes.items(.main_token); + var writer = output.writer(allocator); + var buffer: [2]Ast.Node.Index = undefined; if (ast.isBlock(tree, node)) { const FrameSize = @sizeOf(@Frame(convertCIncludeInternal)); @@ -62,7 +64,7 @@ fn convertCIncludeInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast.No const first = extractString(Ast.tokenSlice(tree, main_tokens[params[0]])); if (std.mem.eql(u8, call_name, "@cInclude")) { - try output.writer().print("#include <{s}>\n", .{first}); + try writer.print("#include <{s}>\n", .{first}); } else if (std.mem.eql(u8, call_name, "@cDefine")) { if (params.len < 2) return; @@ -70,14 +72,14 @@ fn convertCIncludeInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast.No const is_void = if (ast.blockStatements(tree, params[1], &buffer2)) |block| block.len == 0 else false; if (is_void) { - try output.writer().print("#define {s}\n", .{first}); + try writer.print("#define {s}\n", .{first}); } else { if (node_tags[params[1]] != .string_literal) return error.Unsupported; const second = extractString(Ast.tokenSlice(tree, main_tokens[params[1]])); - try output.writer().print("#define {s} {s}\n", .{ first, second }); + try writer.print("#define {s} {s}\n", .{ first, second }); } } else if (std.mem.eql(u8, call_name, "@cUndef")) { - try output.writer().print("#undefine {s}\n", .{first}); + try writer.print("#undef {s}\n", .{first}); } else { return error.Unsupported; } diff --git a/src/types.zig index 2ea83b3..9a5696f 100644 --- a/src/types.zig +++ b/src/types.zig @@ -152,7 +152,7 @@ pub const TextDocument = struct { }; pub const WorkspaceEdit = struct { - changes: ?std.StringHashMap([]TextEdit), + changes: ?std.StringHashMapUnmanaged([]TextEdit), pub fn jsonStringify(self: WorkspaceEdit, options: std.json.StringifyOptions, writer: anytype) @TypeOf(writer).Error!void { try
writer.writeByte('{'); diff --git a/src/uri.zig b/src/uri.zig index 4aa7ad0..7c56a01 100644 --- a/src/uri.zig +++ b/src/uri.zig @@ -23,16 +23,16 @@ pub fn fromPath(allocator: std.mem.Allocator, path: []const u8) ![]const u8 { if (path.len == 0) return ""; const prefix = if (builtin.os.tag == .windows) "file:///" else "file://"; - var buf = std.ArrayList(u8).init(allocator); - try buf.appendSlice(prefix); + var buf = std.ArrayListUnmanaged(u8){}; + try buf.appendSlice(allocator, prefix); for (path) |char| { if (char == std.fs.path.sep) { - try buf.append('/'); + try buf.append(allocator, '/'); } else if (std.mem.indexOfScalar(u8, reserved_chars, char)) |reserved| { - try buf.appendSlice(&reserved_escapes[reserved]); + try buf.appendSlice(allocator, &reserved_escapes[reserved]); } else { - try buf.append(char); + try buf.append(allocator, char); } } @@ -46,7 +46,7 @@ pub fn fromPath(allocator: std.mem.Allocator, path: []const u8) ![]const u8 { } } - return buf.toOwnedSlice(); + return buf.toOwnedSlice(allocator); } /// Move along `rel` from `base` with a single allocation. diff --git a/tests/context.zig b/tests/context.zig index d31e308..d084bb1 100644 --- a/tests/context.zig +++ b/tests/context.zig @@ -38,8 +38,8 @@ pub const Context = struct { params: []const u8, expect: ?[]const u8, ) !void { - var output = std.ArrayList(u8).init(allocator); - defer output.deinit(); + var output = std.ArrayListUnmanaged(u8){}; + defer output.deinit(allocator); // create the request self.request_id += 1; @@ -49,7 +49,7 @@ pub const Context = struct { defer allocator.free(req); // send the request to the server - try self.server.processJsonRpc(output.writer(), req); + try self.server.processJsonRpc(output.writer(allocator), req); // if we don't expect a response ignore it const expected = expect orelse return;