Fixed to use latest HashMap API

Alexandros Naskos 2020-07-06 00:56:41 +03:00
parent 0f7a384b39
commit c067bce9fa
5 changed files with 28 additions and 32 deletions
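For reference, a minimal sketch of the renamed std.HashMap calls this commit switches to. This is a hedged example, not code from the repository: only the calls that appear in the diffs below are exercised, and the std.StringHashMap details (init taking an allocator, entries exposing .key and .value) are assumed from the Zig master branch at the time.

    const std = @import("std");

    test "updated HashMap API" {
        var map = std.StringHashMap(u32).init(std.testing.allocator);
        defer map.deinit();

        // put() now returns !void; the previously stored entry, if any,
        // is returned by fetchPut() instead.
        try map.put("a", 1);
        if (try map.fetchPut("a", 2)) |old| {
            std.debug.assert(old.value == 1);
        }

        // get() returns the stored value directly (what getValue() used to do),
        // while getEntry() returns the key/value entry (what get() used to do).
        std.debug.assert(map.get("a").? == 2);
        std.debug.assert(map.getEntry("a").?.value == 2);
    }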

View File

@@ -1532,7 +1532,7 @@ pub const DeclWithHandle = struct {
 if (pay.items[0].cast(ast.Node.EnumLiteral)) |enum_lit| {
 const scope = findContainerScope(.{ .node = switch_expr_type.type.data.other, .handle = switch_expr_type.handle }) orelse return null;
-if (scope.decls.get(self.handle.tree.tokenSlice(enum_lit.name))) |candidate| {
+if (scope.decls.getEntry(self.handle.tree.tokenSlice(enum_lit.name))) |candidate| {
 switch (candidate.value) {
 .ast_node => |node| {
 if (node.cast(ast.Node.ContainerField)) |container_field| {
@@ -1761,7 +1761,7 @@ pub fn lookupLabel(
 ) error{OutOfMemory}!?DeclWithHandle {
 for (handle.document_scope.scopes) |scope| {
 if (source_index >= scope.range.start and source_index < scope.range.end) {
-if (scope.decls.get(symbol)) |candidate| {
+if (scope.decls.getEntry(symbol)) |candidate| {
 switch (candidate.value) {
 .label_decl => {},
 else => continue,
@@ -1787,7 +1787,7 @@ fn lookupSymbolGlobalInternal(
 ) error{OutOfMemory}!?DeclWithHandle {
 for (handle.document_scope.scopes) |scope| {
 if (source_index >= scope.range.start and source_index < scope.range.end) {
-if (scope.decls.get(symbol)) |candidate| {
+if (scope.decls.getEntry(symbol)) |candidate| {
 switch (candidate.value) {
 .ast_node => |node| {
 if (node.id == .ContainerField) continue;
@@ -1840,7 +1840,7 @@ fn lookupSymbolContainerInternal(
 false;
 if (findContainerScope(container_handle)) |container_scope| {
-if (container_scope.decls.get(symbol)) |candidate| {
+if (container_scope.decls.getEntry(symbol)) |candidate| {
 switch (candidate.value) {
 .ast_node => |node| {
 if (node.id == .ContainerField) {
@@ -1902,7 +1902,7 @@ pub const DocumentScope = struct {
 }
 pub fn deinit(self: DocumentScope, allocator: *std.mem.Allocator) void {
-for (self.scopes) |scope| {
+for (self.scopes) |*scope| {
 scope.decls.deinit();
 allocator.free(scope.uses);
 allocator.free(scope.tests);
@@ -2005,7 +2005,7 @@ fn makeScopeInternal(
 }
 }
-if (try scopes.items[scope_idx].decls.put(name, .{ .ast_node = decl })) |existing| {
+if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = decl })) |existing| {
 // TODO Record a redefinition error.
 }
 }
@@ -2031,7 +2031,7 @@ fn makeScopeInternal(
 for (func.params()) |*param| {
 if (param.name_token) |name_tok| {
-if (try scopes.items[scope_idx].decls.put(tree.tokenSlice(name_tok), .{ .param_decl = param })) |existing| {
+if (try scopes.items[scope_idx].decls.fetchPut(tree.tokenSlice(name_tok), .{ .param_decl = param })) |existing| {
 // TODO Record a redefinition error
 }
 }
@@ -2093,7 +2093,7 @@ fn makeScopeInternal(
 try makeScopeInternal(allocator, scopes, tree, child_node);
 if (child_node.cast(ast.Node.VarDecl)) |var_decl| {
 const name = tree.tokenSlice(var_decl.name_token);
-if (try scopes.items[scope_idx].decls.put(name, .{ .ast_node = child_node })) |existing| {
+if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = child_node })) |existing| {
 // TODO Record a redefinition error.
 }
 }
@@ -2282,7 +2282,7 @@ fn makeScopeInternal(
 if (ptr_idx_payload.index_symbol) |index_symbol| {
 std.debug.assert(index_symbol.id == .Identifier);
 const index_name = tree.tokenSlice(index_symbol.firstToken());
-if (try scope.decls.put(index_name, .{ .ast_node = index_symbol })) |existing| {
+if (try scope.decls.fetchPut(index_name, .{ .ast_node = index_symbol })) |existing| {
 // TODO Record a redefinition error
 }
 }

View File

@@ -297,7 +297,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Handle
 }
 pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle {
-if (self.handles.get(uri)) |entry| {
+if (self.handles.getEntry(uri)) |entry| {
 std.log.debug(.doc_store, "Document already open: {}, incrementing count\n", .{uri});
 entry.value.count += 1;
 if (entry.value.is_build_file) |build_file| {
@@ -337,7 +337,7 @@ fn decrementBuildFileRefs(self: *DocumentStore, build_file: *BuildFile) void {
 }
 fn decrementCount(self: *DocumentStore, uri: []const u8) void {
-if (self.handles.get(uri)) |entry| {
+if (self.handles.getEntry(uri)) |entry| {
 if (entry.value.count == 0) return;
 entry.value.count -= 1;
@@ -375,11 +375,7 @@ pub fn closeDocument(self: *DocumentStore, uri: []const u8) void {
 }
 pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
-if (self.handles.get(uri)) |entry| {
-return entry.value;
-}
-return null;
+return self.handles.get(uri);
 }
 // Check if the document text is now sane, move it to sane_text if so.
@@ -461,17 +457,17 @@ pub fn applyChanges(
 const document = &handle.document;
 for (content_changes.items) |change| {
-if (change.Object.getValue("range")) |range| {
+if (change.Object.get("range")) |range| {
 const start_pos = types.Position{
-.line = range.Object.getValue("start").?.Object.getValue("line").?.Integer,
-.character = range.Object.getValue("start").?.Object.getValue("character").?.Integer,
+.line = range.Object.get("start").?.Object.get("line").?.Integer,
+.character = range.Object.get("start").?.Object.get("character").?.Integer,
 };
 const end_pos = types.Position{
-.line = range.Object.getValue("end").?.Object.getValue("line").?.Integer,
-.character = range.Object.getValue("end").?.Object.getValue("character").?.Integer,
+.line = range.Object.get("end").?.Object.get("line").?.Integer,
+.character = range.Object.get("end").?.Object.get("character").?.Integer,
 };
-const change_text = change.Object.getValue("text").?.String;
+const change_text = change.Object.get("text").?.String;
 const start_index = (try offsets.documentPosition(document.*, start_pos, .utf16)).absolute_index;
 const end_index = (try offsets.documentPosition(document.*, end_pos, .utf16)).absolute_index;
@@ -497,7 +493,7 @@ pub fn applyChanges(
 // Reset the text substring.
 document.text = document.mem[0..new_len];
 } else {
-const change_text = change.Object.getValue("text").?.String;
+const change_text = change.Object.get("text").?.String;
 const old_len = document.text.len;
 if (change_text.len > document.mem.len) {

View File

@@ -1036,7 +1036,7 @@ fn workspaceFoldersChangeHandler(arena: *std.heap.ArenaAllocator, id: types.Requ
 for (req.params.event.added) |add| {
 const duped_uri = try std.mem.dupe(allocator, u8, add.uri);
-if (try workspace_folder_configs.put(duped_uri, null)) |old| {
+if (try workspace_folder_configs.fetchPut(duped_uri, null)) |old| {
 allocator.free(old.key);
 if (old.value) |c| {
 std.json.parseFree(Config, c, std.json.ParseOptions{ .allocator = allocator });
@@ -1290,14 +1290,14 @@ fn processJsonRpc(arena: *std.heap.ArenaAllocator, parser: *std.json.Parser, jso
 var tree = try parser.parse(json);
 defer tree.deinit();
-const id = if (tree.root.Object.getValue("id")) |id| switch (id) {
+const id = if (tree.root.Object.get("id")) |id| switch (id) {
 .Integer => |int| types.RequestId{ .Integer = int },
 .String => |str| types.RequestId{ .String = str },
 else => types.RequestId{ .Integer = 0 },
 } else types.RequestId{ .Integer = 0 };
-std.debug.assert(tree.root.Object.getValue("method") != null);
-const method = tree.root.Object.getValue("method").?.String;
+std.debug.assert(tree.root.Object.get("method") != null);
+const method = tree.root.Object.get("method").?.String;
 const start_time = std.time.milliTimestamp();
 defer {
@@ -1377,7 +1377,7 @@ fn processJsonRpc(arena: *std.heap.ArenaAllocator, parser: *std.json.Parser, jso
 // TODO: Unimplemented methods, implement them and add them to server capabilities.
 return try respondGeneric(id, null_result_response);
 }
-if (tree.root.Object.getValue("id")) |_| {
+if (tree.root.Object.get("id")) |_| {
 return try respondGeneric(id, not_implemented_response);
 }
 std.log.debug(.main, "Method without return value not implemented: {}", .{method});

View File

@@ -7,7 +7,7 @@ const offsets = @import("offsets.zig");
 const ast = std.zig.ast;
-// TODO Use an map to array lists and collect at the end instead?
+// TODO Use a map to array lists and collect at the end instead?
 const RefHandlerContext = struct {
 edits: *std.StringHashMap([]types.TextEdit),
 allocator: *std.mem.Allocator,
@@ -15,7 +15,7 @@ const RefHandlerContext = struct {
 };
 fn refHandler(context: RefHandlerContext, loc: types.Location) !void {
-var text_edits = if (context.edits.getValue(loc.uri)) |slice|
+var text_edits = if (context.edits.get(loc.uri)) |slice|
 std.ArrayList(types.TextEdit).fromOwnedSlice(context.allocator, slice)
 else
 std.ArrayList(types.TextEdit).init(context.allocator);
@@ -24,7 +24,7 @@ fn refHandler(context: RefHandlerContext, loc: types.Location) !void {
 .range = loc.range,
 .newText = context.new_name,
 };
-_ = try context.edits.put(loc.uri, text_edits.toOwnedSlice());
+try context.edits.put(loc.uri, text_edits.toOwnedSlice());
 }
 pub fn renameSymbol(
pub fn renameSymbol( pub fn renameSymbol(

View File

@@ -40,7 +40,7 @@ inline fn fromDynamicTreeInternal(arena: *std.heap.ArenaAllocator, value: std.js
 const is_default = comptime if (is_struct) std.meta.trait.hasDecls(actual_type, .{ "default", "value_type" }) else false;
 const is_transform = comptime if (is_struct) std.meta.trait.hasDecls(actual_type, .{ "original_type", "transform" }) else false;
-if (value.Object.getValue(field.name)) |json_field| {
+if (value.Object.get(field.name)) |json_field| {
 if (is_exists) {
 @field(out, field.name) = Exists{ .exists = true };
 } else if (is_transform) {