add multi object for loop support (#1011)

* run `zig fmt`

* add support for multi object for loops

* add completion tests on multi object for loops

* update minimum zig build version

* use multi object for loops in codebase

* Update tres to latest version

* fix panics when generating document scope on invalid for loops
This commit is contained in:
Techatrix 2023-02-21 22:11:35 +00:00 committed by GitHub
parent b635317e2a
commit 0f77fd5b0e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 281 additions and 130 deletions

View File

@ -7,7 +7,7 @@ const zls_version = std.builtin.Version{ .major = 0, .minor = 11, .patch = 0 };
pub fn build(b: *std.build.Builder) !void {
comptime {
const current_zig = builtin.zig_version;
const min_zig = std.SemanticVersion.parse("0.11.0-dev.1570+693b12f8e") catch return; // addPackage -> addModule
const min_zig = std.SemanticVersion.parse("0.11.0-dev.1681+0bb178bbb") catch return; // implement multi-object for loops
if (current_zig.order(min_zig) == .lt) {
@compileError(std.fmt.comptimePrint("Your Zig version v{} does not meet the minimum build requirement of v{}", .{ current_zig, min_zig }));
}

View File

@ -785,7 +785,7 @@ pub fn interpret(
var writer = final.writer();
try writer.writeAll("log: ");
for (params) |param, index| {
for (params, 0..) |param, index| {
var value = (try interpreter.interpret(param, namespace, options)).maybeGetValue() orelse {
try writer.writeAll("indeterminate");
continue;

View File

@ -270,7 +270,7 @@ fn garbageCollectionImports(self: *DocumentStore) error{OutOfMemory}!void {
var queue = std.ArrayListUnmanaged(Uri){};
for (self.handles.values()) |handle, handle_index| {
for (self.handles.values(), 0..) |handle, handle_index| {
if (!handle.open) continue;
reachable.set(handle_index);

View File

@ -308,7 +308,7 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
}
}
for (handle.cimports.items(.hash)) |hash, i| {
for (handle.cimports.items(.hash), handle.cimports.items(.node)) |hash, node| {
const result = server.document_store.cimports.get(hash) orelse continue;
if (result != .failure) continue;
const stderr = std.mem.trim(u8, result.failure, " ");
@ -318,7 +318,6 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
_ = pos_and_diag_iterator.next(); // skip line
_ = pos_and_diag_iterator.next(); // skip character
const node = handle.cimports.items(.node)[i];
try diagnostics.append(allocator, .{
.range = offsets.nodeToRange(handle.tree, node, server.offset_encoding),
.severity = .Error,
@ -1044,7 +1043,7 @@ fn gotoDefinitionBuiltin(
const name = offsets.tokenIndexToSlice(handle.tree.source, loc.start);
if (std.mem.eql(u8, name, "@cImport")) {
const index = for (handle.cimports.items(.node)) |cimport_node, index| {
const index = for (handle.cimports.items(.node), 0..) |cimport_node, index| {
const main_token = handle.tree.nodes.items(.main_token)[cimport_node];
if (loc.start == offsets.tokenToIndex(handle.tree, main_token)) break index;
} else return null;
@ -1091,7 +1090,7 @@ fn hoverDefinitionBuiltin(server: *Server, pos_index: usize, handle: *const Docu
var writer = contents.writer(server.arena.allocator());
if (std.mem.eql(u8, name, "cImport")) blk: {
const index = for (handle.cimports.items(.node)) |cimport_node, index| {
const index = for (handle.cimports.items(.node), 0..) |cimport_node, index| {
const main_token = handle.tree.nodes.items(.main_token)[cimport_node];
const cimport_loc = offsets.tokenToLoc(handle.tree, main_token);
if (cimport_loc.start <= pos_index and pos_index <= cimport_loc.end) break index;
@ -1947,16 +1946,16 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) Error!typ
.tokenTypes = comptime block: {
const tokTypeFields = std.meta.fields(semantic_tokens.TokenType);
var names: [tokTypeFields.len][]const u8 = undefined;
for (tokTypeFields) |field, i| {
names[i] = field.name;
for (tokTypeFields, &names) |field, *name| {
name.* = field.name;
}
break :block &names;
},
.tokenModifiers = comptime block: {
const tokModFields = std.meta.fields(semantic_tokens.TokenModifiers);
var names: [tokModFields.len][]const u8 = undefined;
for (tokModFields) |field, i| {
names[i] = field.name;
for (tokModFields, &names) |field, *name| {
name.* = field.name;
}
break :block &names;
},
@ -2033,7 +2032,7 @@ fn requestConfiguration(server: *Server) Error!void {
const configuration_items = comptime confi: {
var comp_confi: [std.meta.fields(Config).len]types.ConfigurationItem = undefined;
inline for (std.meta.fields(Config)) |field, index| {
inline for (std.meta.fields(Config), 0..) |field, index| {
comp_confi[index] = .{
.section = "zls." ++ field.name,
};
@ -2064,8 +2063,7 @@ fn handleConfiguration(server: *Server, json: std.json.Value) error{OutOfMemory}
const result = json.Array;
inline for (std.meta.fields(Config)) |field, index| {
const value = result.items[index];
inline for (std.meta.fields(Config), result.items) |field, value| {
const ft = if (@typeInfo(field.type) == .Optional)
@typeInfo(field.type).Optional.child
else
@ -2641,7 +2639,7 @@ fn inlayHintHandler(server: *Server, request: types.InlayHintParams) Error!?[]ty
var last_position: types.Position = .{ .line = 0, .character = 0 };
var converted_hints = try server.arena.allocator().alloc(types.InlayHint, hints.len);
for (hints) |hint, i| {
for (hints, 0..) |hint, i| {
const index = offsets.tokenToIndex(handle.tree, hint.token_index);
const position = offsets.advancePosition(
handle.tree.source,
@ -2727,11 +2725,11 @@ fn selectionRangeHandler(server: *Server, request: types.SelectionRangeParams) E
// descending into the child containing the position at every step.
var result = try allocator.alloc(*SelectionRange, request.positions.len);
var locs = try std.ArrayListUnmanaged(offsets.Loc).initCapacity(allocator, 32);
for (request.positions) |position, position_index| {
for (request.positions, result) |position, *out| {
const index = offsets.positionToIndex(handle.text, position, server.offset_encoding);
locs.clearRetainingCapacity();
for (handle.tree.nodes.items(.data)) |_, i| {
for (0..handle.tree.nodes.len) |i| {
const node = @intCast(Ast.Node.Index, i);
const loc = offsets.nodeToLoc(handle.tree, node);
if (loc.start <= index and index <= loc.end) {
@ -2752,11 +2750,11 @@ fn selectionRangeHandler(server: *Server, request: types.SelectionRangeParams) E
}
var selection_ranges = try allocator.alloc(SelectionRange, locs.items.len);
for (selection_ranges) |*range, i| {
for (selection_ranges, 0..) |*range, i| {
range.range = offsets.locToRange(handle.text, locs.items[i], server.offset_encoding);
range.parent = if (i + 1 < selection_ranges.len) &selection_ranges[i + 1] else null;
}
result[position_index] = &selection_ranges[0];
out.* = &selection_ranges[0];
}
return result;
@ -2770,8 +2768,8 @@ fn shorterLocsFirst(_: void, lhs: offsets.Loc, rhs: offsets.Loc) bool {
fn requestMethodExists(method: []const u8) bool {
const methods = comptime blk: {
var methods: [types.request_metadata.len][]const u8 = undefined;
for (types.request_metadata) |meta, i| {
methods[i] = meta.method;
for (types.request_metadata, &methods) |meta, *out| {
out.* = meta.method;
}
break :blk methods;
};
@ -2785,7 +2783,7 @@ fn requestMethodExists(method: []const u8) bool {
fn notificationMethodExists(method: []const u8) bool {
const methods = comptime blk: {
var methods: [types.notification_metadata.len][]const u8 = undefined;
for (types.notification_metadata) |meta, i| {
for (types.notification_metadata, 0..) |meta, i| {
methods[i] = meta.method;
}
break :blk methods;

View File

@ -764,7 +764,7 @@ pub const Key = union(enum) {
.error_set_type => |error_set_info| {
const names = error_set_info.names;
try writer.writeAll("error{");
for (names) |name, i| {
for (names, 0..) |name, i| {
if (i != 0) try writer.writeByte(',');
try writer.writeAll(ip.indexToKey(name).bytes);
}
@ -774,7 +774,7 @@ pub const Key = union(enum) {
.function_type => |function_info| {
try writer.writeAll("fn(");
for (function_info.args) |arg_ty, i| {
for (function_info.args, 0..) |arg_ty, i| {
if (i != 0) try writer.writeAll(", ");
if (i < 32) {
@ -809,7 +809,7 @@ pub const Key = union(enum) {
.union_type => return panicOrElse("TODO", null),
.tuple_type => |tuple_info| {
try writer.writeAll("tuple{");
for (tuple_info.types) |field_ty, i| {
for (tuple_info.types, 0..) |field_ty, i| {
if (i != 0) try writer.writeAll(", ");
const val = tuple_info.values[i];
if (val != Index.none) {
@ -1704,7 +1704,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
var seen_const = false;
var convert_to_slice = false;
var chosen_i: usize = 0;
for (types[1..]) |candidate, candidate_i| {
for (types[1..], 1..) |candidate, candidate_i| {
if (candidate == chosen) continue;
const candidate_key: Key = ip.indexToKey(candidate);
@ -1717,7 +1717,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
}
if ((try ip.coerceInMemoryAllowed(gpa, arena, candidate, chosen, true, target)) == .ok) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
}
@ -1728,13 +1728,13 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
.f16, .f32, .f64, .f80, .f128 => {
if (chosen_key.floatBits(target) < candidate_key.floatBits(target)) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
}
continue;
},
.comptime_int, .comptime_float => {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
},
else => {},
@ -1772,13 +1772,13 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
if (chosen_bits < candidate_bits) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
}
continue;
},
.comptime_int => {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
},
else => {},
@ -1786,7 +1786,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
.int_type => |chosen_info| {
if (chosen_info.bits < candidate_key.intInfo(target, ip).bits) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
}
continue;
},
@ -1828,7 +1828,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
.f16, .f32, .f64, .f80, .f128 => continue,
.comptime_int => {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
},
.comptime_float => unreachable,
@ -1861,13 +1861,13 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
if (chosen_bits < candidate_bits) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
}
continue;
},
.comptime_int => {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
},
else => {},
@ -1875,7 +1875,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
.int_type => |chosen_info| {
if (chosen_info.bits < candidate_info.bits) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
}
continue;
},
@ -1887,7 +1887,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
.comptime_int => {
if (candidate_info.size == .C) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
}
},
@ -1908,7 +1908,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
// In case we see i.e.: `*[1]T`, `*[2]T`, `[*]T`
convert_to_slice = false;
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
}
if (candidate_info.size == .One and
@ -1941,7 +1941,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
if (cand_ok) {
convert_to_slice = true;
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
}
@ -1963,7 +1963,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
if (cand_ok) {
if (!chosen_ok or chosen_info.size != .C) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
}
continue;
} else {
@ -1979,7 +1979,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
.int_type => {
if (candidate_info.size == .C) {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
}
},
@ -2022,7 +2022,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
.ok == try ip.coerceInMemoryAllowedFns(gpa, arena, chosen_info, candidate_elem_key.function_type, target))
{
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
}
}
@ -2043,13 +2043,13 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
seen_const = seen_const or chosen_key.isConstPtr();
any_are_null = false;
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
},
.vector_type => switch (chosen_key) {
.array_type => {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
},
else => {},
@ -2063,13 +2063,13 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
.undefined_type,
=> {
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
},
.null_type => {
any_are_null = true;
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
},
else => {},
@ -2081,7 +2081,7 @@ pub fn resolvePeerTypes(ip: *InternPool, gpa: Allocator, types: []const Index, t
if ((try ip.coerceInMemoryAllowed(gpa, arena, candidate, chosen_info.payload_type, true, target)) == .ok) {
any_are_null = true;
chosen = candidate;
chosen_i = candidate_i + 1;
chosen_i = candidate_i;
continue;
}
},
@ -2549,9 +2549,7 @@ fn coerceInMemoryAllowedFns(
} };
}
for (dest_info.args) |dest_arg_ty, i| {
const src_arg_ty = src_info.args[i];
for (dest_info.args, src_info.args, 0..) |dest_arg_ty, src_arg_ty, i| {
// Note: Cast direction is reversed here.
const param = try ip.coerceInMemoryAllowed(gpa, arena, src_arg_ty, dest_arg_ty, true, target);
if (param != .ok) {

View File

@ -112,12 +112,11 @@ pub fn dotCompletions(
},
.enum_type => |enum_index| {
const enum_info = ip.getEnum(enum_index);
for (enum_info.fields.keys()) |field_name, i| {
const field_val = enum_info.values.keys()[i];
for (enum_info.fields.keys(), enum_info.values.keys()) |field_name, field_value| {
try completions.append(arena, .{
.label = field_name,
.kind = .Field,
.detail = try std.fmt.allocPrint(arena, "{}", .{field_val.fmtValue(enum_info.tag_type, ip.*)}),
.detail = try std.fmt.allocPrint(arena, "{}", .{field_value.fmtValue(enum_info.tag_type, ip.*)}),
});
}
},
@ -138,7 +137,7 @@ pub fn dotCompletions(
}
},
.tuple_type => |tuple_info| {
for (tuple_info.types) |tuple_ty, i| {
for (tuple_info.types, 0..) |tuple_ty, i| {
try completions.append(arena, .{
.label = try std.fmt.allocPrint(arena, "{d}", .{i}),
.kind = .Field,

View File

@ -2086,7 +2086,7 @@ fn findContainerScopeIndex(container_handle: NodeWithHandle) ?usize {
if (!ast.isContainer(handle.tree, container)) return null;
return for (handle.document_scope.scopes.items(.data)) |data, scope_index| {
return for (handle.document_scope.scopes.items(.data), 0..) |data, scope_index| {
switch (data) {
.container => |node| if (node == container) {
break scope_index;
@ -2821,11 +2821,8 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.@"while",
.while_simple,
.while_cont,
.@"for",
.for_simple,
=> {
const while_node = ast.fullWhile(tree, node_idx).?;
const is_for = node_tag == .@"for" or node_tag == .for_simple;
if (while_node.label_token) |label| {
std.debug.assert(token_tags[label] == .identifier);
@ -2858,12 +2855,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
std.debug.assert(token_tags[name_token] == .identifier);
const name = tree.tokenSlice(name_token);
try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, if (is_for) .{
.array_payload = .{
.identifier = name_token,
.array_expr = while_node.ast.cond_expr,
},
} else .{
try scopes.items(.decls)[scope_index].putNoClobber(allocator, name, .{
.pointer_payload = .{
.name = name_token,
.condition = while_node.ast.cond_expr,
@ -2904,6 +2896,59 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
try makeScopeInternal(allocator, context, while_node.ast.else_expr);
}
},
.@"for",
.for_simple,
=> {
const for_node = ast.fullFor(tree, node_idx).?;
if (for_node.label_token) |label| {
std.debug.assert(token_tags[label] == .identifier);
try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, for_node.ast.for_token),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, node_idx)).end,
},
.data = .other,
});
const scope_index = scopes.len - 1;
try scopes.items(.decls)[scope_index].putNoClobber(allocator, tree.tokenSlice(label), .{ .label_decl = .{
.label = label,
.block = for_node.ast.then_expr,
} });
}
try scopes.append(allocator, .{
.loc = .{
.start = offsets.tokenToIndex(tree, for_node.payload_token),
.end = offsets.tokenToLoc(tree, ast.lastToken(tree, for_node.ast.then_expr)).end,
},
.data = .other,
});
const scope_index = scopes.len - 1;
var capture_token = for_node.payload_token;
for (for_node.ast.inputs) |input| {
if (capture_token + 1 >= tree.tokens.len) break;
const capture_is_ref = token_tags[capture_token] == .asterisk;
const name_token = capture_token + @boolToInt(capture_is_ref);
capture_token = name_token + 2;
const name = offsets.tokenToSlice(tree, name_token);
try scopes.items(.decls)[scope_index].put(allocator, name, .{
.array_payload = .{
.identifier = name_token,
.array_expr = input,
},
});
}
try makeScopeInternal(allocator, context, for_node.ast.then_expr);
if (for_node.ast.else_expr != 0) {
try makeScopeInternal(allocator, context, for_node.ast.else_expr);
}
},
.@"switch",
.switch_comma,
=> {
@ -3171,6 +3216,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
.array_type,
.array_access,
.error_union,
.for_range,
=> {
try makeScopeInternal(allocator, context, data[node_idx].lhs);
try makeScopeInternal(allocator, context, data[node_idx].rhs);

View File

@ -195,6 +195,33 @@ fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While {
return result;
}
/// Builds a `full.For` view from raw `for`-loop components by locating the
/// surrounding tokens (inline/label before the `for`, payload after the inputs,
/// `else` after the then-branch) via token-tag inspection.
fn fullForComponents(tree: Ast, info: full.For.Components) full.For {
    const token_tags = tree.tokens.items(.tag);
    var result: full.For = .{
        .ast = info,
        .inline_token = null,
        .label_token = null,
        // Always set below; `undefined` is only a placeholder.
        .payload_token = undefined,
        // Only meaningful when `info.else_expr != 0`; left undefined otherwise.
        .else_token = undefined,
    };
    // Walk backwards from the `for` keyword; `-|` (saturating sub) guards
    // against underflow when the loop starts at token 0.
    var tok_i = info.for_token -| 1;
    if (token_tags[tok_i] == .keyword_inline) {
        result.inline_token = tok_i;
        tok_i -|= 1;
    }
    // A label looks like `name:` immediately before `[inline] for`.
    if (token_tags[tok_i] == .colon and
        token_tags[tok_i -| 1] == .identifier)
    {
        result.label_token = tok_i -| 1;
    }
    const last_cond_token = lastToken(tree, info.inputs[info.inputs.len - 1]);
    // Payload starts after `)` and `|`: last input token + 3, plus one more
    // token if a trailing comma follows the last input.
    result.payload_token = last_cond_token + 3 + @boolToInt(token_tags[last_cond_token + 1] == .comma);
    if (info.else_expr != 0) {
        result.else_token = lastToken(tree, info.then_expr) + 1;
    }
    return result;
}
pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
return fullWhileComponents(tree, .{
@ -230,26 +257,28 @@ pub fn whileFull(tree: Ast, node: Node.Index) full.While {
});
}
pub fn forSimple(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
/// Returns the `full.For` view of a `.for_simple` node: a `for` loop with
/// exactly one input. `data.lhs` holds the single input node and `data.rhs`
/// the then-branch; there is no else-branch (`else_expr = 0`).
pub fn forSimple(tree: Ast, node: Node.Index) full.For {
    const data = &tree.nodes.items(.data)[node];
    // Reinterpret the single lhs field as a one-element array so it can be
    // sliced into the `inputs: []const Node.Index` expected by full.For.
    // NOTE(review): this borrows from the tree's node data — valid as long as
    // the returned full.For does not outlive `tree`.
    const inputs: *[1]Node.Index = &data.lhs;
    return fullForComponents(tree, .{
        .for_token = tree.nodes.items(.main_token)[node],
        .inputs = inputs[0..1],
        .then_expr = data.rhs,
        .else_expr = 0,
    });
}
pub fn forFull(tree: Ast, node: Node.Index) full.While {
pub fn forFull(tree: Ast, node: Node.Index) full.For {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.If);
return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
.then_expr = extra.then_expr,
.else_expr = extra.else_expr,
const extra = @bitCast(Node.For, data.rhs);
const inputs = tree.extra_data[data.lhs..][0..extra.inputs];
const then_expr = tree.extra_data[data.lhs + extra.inputs];
const else_expr = if (extra.has_else) tree.extra_data[data.lhs + extra.inputs + 1] else 0;
return fullForComponents(tree, .{
.for_token = tree.nodes.items(.main_token)[node],
.inputs = inputs,
.then_expr = then_expr,
.else_expr = else_expr,
});
}
@ -276,6 +305,12 @@ pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While {
.while_simple => whileSimple(tree, node),
.while_cont => whileCont(tree, node),
.@"while" => whileFull(tree, node),
else => null,
};
}
pub fn fullFor(tree: Ast, node: Node.Index) ?full.For {
return switch (tree.nodes.items(.tag)[node]) {
.for_simple => forSimple(tree, node),
.@"for" => forFull(tree, node),
else => null,
@ -507,6 +542,12 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
return main_tokens[n] + end_offset;
},
.for_range => if (datas[n].rhs != 0) {
n = datas[n].rhs;
} else {
return main_tokens[n] + end_offset;
},
.call, .async_call => {
end_offset += 1; // for the rparen
const params = tree.extraData(datas[n].rhs, Node.SubRange);
@ -914,11 +955,15 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
std.debug.assert(extra.else_expr != 0);
n = extra.else_expr;
},
.@"if", .@"for" => {
.@"if" => {
const extra = tree.extraData(datas[n].rhs, Node.If);
std.debug.assert(extra.else_expr != 0);
n = extra.else_expr;
},
.@"for" => {
const extra = @bitCast(Node.For, datas[n].rhs);
n = tree.extra_data[datas[n].lhs + extra.inputs + @boolToInt(extra.has_else)];
},
.@"suspend" => {
if (datas[n].lhs != 0) {
n = datas[n].lhs;
@ -1239,6 +1284,7 @@ pub fn iterateChildren(
.block_two,
.block_two_semicolon,
.error_union,
.for_range,
=> {
try callback(context, node_data[node].lhs);
try callback(context, node_data[node].rhs);
@ -1367,8 +1413,6 @@ pub fn iterateChildren(
.while_simple,
.while_cont,
.@"while",
.for_simple,
.@"for",
=> {
const while_ast = fullWhile(tree, node).?.ast;
try callback(context, while_ast.cond_expr);
@ -1376,6 +1420,16 @@ pub fn iterateChildren(
try callback(context, while_ast.then_expr);
try callback(context, while_ast.else_expr);
},
.for_simple,
.@"for",
=> {
const for_ast = fullFor(tree, node).?.ast;
for (for_ast.inputs) |child| {
try callback(context, child);
}
try callback(context, for_ast.then_expr);
try callback(context, for_ast.else_expr);
},
.@"if",
.if_simple,

View File

@ -288,7 +288,7 @@ fn createCamelcaseText(allocator: std.mem.Allocator, identifier: []const u8) ![]
fn createDiscardText(builder: *Builder, identifier_name: []const u8, declaration_start: usize, add_block_indentation: bool) ![]const u8 {
const indent = find_indent: {
const line = offsets.lineSliceUntilIndex(builder.handle.text, declaration_start);
for (line) |char, i| {
for (line, 0..) |char, i| {
if (!std.ascii.isWhitespace(char)) {
break :find_indent line[0..i];
}

View File

@ -634,7 +634,7 @@ fn writeMarkdownFromHtmlInternal(html: []const u8, single_line: bool, depth: u32
const tags: []const []const u8 = &.{ "pre", "p", "em", "ul", "li", "a", "code" };
const opening_tags: []const []const u8 = &.{ "<pre>", "<p>", "<em>", "<ul>", "<li>", "<a>", "<code>" };
const closing_tags: []const []const u8 = &.{ "</pre>", "</p>", "</em>", "</ul>", "</li>", "</a>", "</code>" };
const tag_index = for (tags) |tag_name, i| {
const tag_index = for (tags, 0..) |tag_name, i| {
if (std.mem.startsWith(u8, html[tag_start_index + 1 ..], tag_name)) break i;
} else {
index += 1;

View File

@ -210,10 +210,10 @@ pub const DidChangeConfigurationParams = struct {
fn getConfigurationType() type {
var config_info: std.builtin.Type = @typeInfo(Config);
var fields: [config_info.Struct.fields.len]std.builtin.Type.StructField = undefined;
for (config_info.Struct.fields) |field, i| {
fields[i] = field;
for (config_info.Struct.fields, &fields) |field, *new_field| {
new_field.* = field;
if (@typeInfo(field.type) != .Optional) {
fields[i].type = @Type(std.builtin.Type{
new_field.type = @Type(std.builtin.Type{
.Optional = .{ .child = field.type },
});
}

View File

@ -54,14 +54,14 @@ const Builder = struct {
}
pub fn getRanges(builder: Builder) error{OutOfMemory}![]types.FoldingRange {
var result = try builder.allocator.alloc(types.FoldingRange, builder.locations.items.len);
errdefer builder.allocator.free(result);
var result_locations = try builder.allocator.alloc(types.FoldingRange, builder.locations.items.len);
errdefer builder.allocator.free(result_locations);
for (result) |*r, i| {
r.* = .{
for (builder.locations.items, result_locations) |folding_range, *result| {
result.* = .{
.startLine = undefined,
.endLine = undefined,
.kind = builder.locations.items[i].kind,
.kind = folding_range.kind,
};
}
@ -88,9 +88,9 @@ const Builder = struct {
var items = try builder.allocator.alloc(Item, builder.locations.items.len * 2);
defer builder.allocator.free(items);
for (builder.locations.items) |*folding_range, i| {
items[2 * i + 0] = .{ .output = &result[i], .input = folding_range, .where = .start };
items[2 * i + 1] = .{ .output = &result[i], .input = folding_range, .where = .end };
for (builder.locations.items, result_locations, 0..) |*folding_range, *result, i| {
items[2 * i + 0] = .{ .output = result, .input = folding_range, .where = .start };
items[2 * i + 1] = .{ .output = result, .input = folding_range, .where = .end };
}
// sort items based on their source position
@ -116,7 +116,7 @@ const Builder = struct {
}
}
return result;
return result_locations;
}
};
@ -135,7 +135,7 @@ pub fn generateFoldingRanges(allocator: std.mem.Allocator, tree: Ast, encoding:
var start_doc_comment: ?Ast.TokenIndex = null;
var end_doc_comment: ?Ast.TokenIndex = null;
for (token_tags) |tag, i| {
for (token_tags, 0..) |tag, i| {
const token = @intCast(Ast.TokenIndex, i);
switch (tag) {
.doc_comment,
@ -162,7 +162,7 @@ pub fn generateFoldingRanges(allocator: std.mem.Allocator, tree: Ast, encoding:
// TODO add folding range for top level `@Import()`
for (node_tags) |node_tag, i| {
for (node_tags, 0..) |node_tag, i| {
const node = @intCast(Ast.Node.Index, i);
switch (node_tag) {

View File

@ -127,13 +127,11 @@ fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: analysis.D
/// takes parameter nodes from the ast and function parameter names from `Builtin.arguments` and writes parameter hints into `builder.hints`
fn writeBuiltinHint(builder: *Builder, parameters: []const Ast.Node.Index, arguments: []const []const u8) !void {
if (parameters.len == 0) return;
const handle = builder.handle;
const tree = handle.tree;
for (arguments) |arg, i| {
if (i >= parameters.len) break;
const len = @min(arguments.len, parameters.len);
for (arguments[0..len], parameters[0..len]) |arg, parameter| {
if (arg.len == 0) continue;
const colonIndex = std.mem.indexOfScalar(u8, arg, ':');
@ -153,7 +151,7 @@ fn writeBuiltinHint(builder: *Builder, parameters: []const Ast.Node.Index, argum
}
try builder.appendParameterHint(
tree.firstToken(parameters[i]),
tree.firstToken(parameter),
label orelse "",
std.mem.trim(u8, type_expr, " \t\n"),
no_alias,

View File

@ -225,7 +225,9 @@ fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult {
const fields = @typeInfo(ArgId).Enum.fields;
const KV = struct { []const u8, ArgId };
var pairs: [fields.len]KV = undefined;
for (pairs) |*pair, i| pair.* = .{ fields[i].name, @intToEnum(ArgId, fields[i].value) };
for (&pairs, fields) |*pair, field| {
pair.* = .{ field.name, @intToEnum(ArgId, field.value) };
}
break :blk pairs[0..];
});
const help_message: []const u8 = comptime help_message: {

View File

@ -20,7 +20,7 @@ pub fn indexToPosition(text: []const u8, index: usize, encoding: Encoding) types
pub fn maybePositionToIndex(text: []const u8, position: types.Position, encoding: Encoding) ?usize {
var line: u32 = 0;
var line_start_index: usize = 0;
for (text) |c, i| {
for (text, 0..) |c, i| {
if (line == position.line) break;
if (c == '\n') {
line += 1;
@ -39,7 +39,7 @@ pub fn maybePositionToIndex(text: []const u8, position: types.Position, encoding
pub fn positionToIndex(text: []const u8, position: types.Position, encoding: Encoding) usize {
var line: u32 = 0;
var line_start_index: usize = 0;
for (text) |c, i| {
for (text, 0..) |c, i| {
if (line == position.line) break;
if (c == '\n') {
line += 1;

View File

@ -37,7 +37,7 @@ pub const TokenModifiers = packed struct {
fn toInt(self: TokenModifiers) u32 {
var res: u32 = 0;
inline for (std.meta.fields(TokenModifiers)) |field, i| {
inline for (std.meta.fields(TokenModifiers), 0..) |field, i| {
if (@field(self, field.name)) {
res |= 1 << i;
}
@ -534,8 +534,6 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe
.@"while",
.while_simple,
.while_cont,
.for_simple,
.@"for",
=> {
const while_node = ast.fullWhile(tree, node).?;
try writeToken(builder, while_node.label_token, .label);
@ -568,6 +566,39 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe
try callWriteNodeTokens(allocator, .{ builder, while_node.ast.else_expr });
}
},
.for_simple,
.@"for",
=> {
const for_node = ast.fullFor(tree, node).?;
try writeToken(builder, for_node.label_token, .label);
try writeToken(builder, for_node.inline_token, .keyword);
try writeToken(builder, for_node.ast.for_token, .keyword);
for (for_node.ast.inputs) |input_node| {
try callWriteNodeTokens(allocator, .{ builder, input_node });
}
var capture_token = for_node.payload_token;
for (for_node.ast.inputs) |_| {
const capture_is_ref = token_tags[capture_token] == .asterisk;
const name_token = capture_token + @boolToInt(capture_is_ref);
capture_token = name_token + 2;
if (capture_is_ref) {
try writeToken(builder, capture_token, .operator);
}
try writeToken(builder, name_token, .variable);
if (token_tags[name_token + 1] == .pipe) {
try writeToken(builder, name_token + 1, .operator);
}
}
try callWriteNodeTokens(allocator, .{ builder, for_node.ast.then_expr });
if (for_node.ast.else_expr != 0) {
try writeToken(builder, for_node.else_token, .keyword);
try callWriteNodeTokens(allocator, .{ builder, for_node.ast.else_expr });
}
},
.@"if",
.if_simple,
=> {
@ -823,6 +854,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe
.sub_wrap,
.sub_sat,
.@"orelse",
.for_range,
=> {
try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs });
const token_type: TokenType = switch (tag) {

View File

@ -195,9 +195,9 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl
types.ParameterInformation,
builtin.arguments.len,
);
for (param_infos) |*info, i| {
for (param_infos, builtin.arguments) |*info, argument| {
info.* = .{
.label = .{ .string = builtin.arguments[i] },
.label = .{ .string = argument },
.documentation = null,
};
}

@ -1 +1 @@
Subproject commit fb23d644500ae5b93dd71b5a8406d0c83e8e4fbe
Subproject commit d8b0c24a945da02fffdae731edd1903c6889e73c

View File

@ -11,7 +11,7 @@ const reserved_chars = &[_]u8{
const reserved_escapes = blk: {
var escapes: [reserved_chars.len][3]u8 = [_][3]u8{[_]u8{undefined} ** 3} ** reserved_chars.len;
for (reserved_chars) |c, i| {
for (reserved_chars, 0..) |c, i| {
escapes[i][0] = '%';
_ = std.fmt.bufPrint(escapes[i][1..], "{X}", .{c}) catch unreachable;
}

View File

@ -3,6 +3,7 @@ const zls = @import("zls");
const Ast = std.zig.Ast;
const offsets = zls.offsets;
const translate_c = zls.translate_c;
const allocator: std.mem.Allocator = std.testing.allocator;
@ -53,13 +54,14 @@ fn testConvertCInclude(cimport_source: []const u8, expected: []const u8) !void {
const source: [:0]u8 = try std.fmt.allocPrintZ(allocator, "const c = {s};", .{cimport_source});
defer allocator.free(source);
var ast = try Ast.parse(allocator, source, .zig);
defer ast.deinit(allocator);
var tree = try Ast.parse(allocator, source, .zig);
defer tree.deinit(allocator);
const main_tokens = ast.nodes.items(.main_token);
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
const node: Ast.Node.Index = blk: {
for (ast.nodes.items(.tag)) |tag, index| {
for (node_tags, main_tokens, 0..) |tag, token, i| {
switch (tag) {
.builtin_call_two,
.builtin_call_two_comma,
@ -69,14 +71,14 @@ fn testConvertCInclude(cimport_source: []const u8, expected: []const u8) !void {
else => continue,
}
if (!std.mem.eql(u8, ast.tokenSlice(main_tokens[index]), "@cImport")) continue;
if (!std.mem.eql(u8, offsets.tokenToSlice(tree, token), "@cImport")) continue;
break :blk @intCast(Ast.Node.Index, index);
break :blk @intCast(Ast.Node.Index, i);
}
return error.TestUnexpectedResult; // source doesn't contain a cImport
};
const output = try translate_c.convertCInclude(allocator, ast, node);
const output = try translate_c.convertCInclude(allocator, tree, node);
defer allocator.free(output);
const trimmed_output = std.mem.trimRight(u8, output, &.{'\n'});

View File

@ -342,7 +342,7 @@ const Context = struct {
var args = try allocator.alloc(ComptimeInterpreter.Value, arguments.len);
defer allocator.free(args);
for (arguments) |argument, i| {
for (arguments, 0..) |argument, i| {
args[i] = .{
.interpreter = self.interpreter,
.node_idx = 0,
@ -377,7 +377,7 @@ const Context = struct {
pub fn findFn(self: Context, name: []const u8) Ast.Node.Index {
const handle = self.interpreter.getHandle();
for (handle.tree.nodes.items(.tag)) |tag, i| {
for (handle.tree.nodes.items(.tag), 0..) |tag, i| {
if (tag != .fn_decl) continue;
const node = @intCast(Ast.Node.Index, i);
var buffer: [1]Ast.Node.Index = undefined;

View File

@ -183,13 +183,35 @@ test "completion - captures" {
try testCompletion(
\\const S = struct { alpha: u32 };
\\fn foo(items: []S) void {
\\ for (items) |bar, i| {
\\ for (items, 0..) |bar, i| {
\\ bar.<cursor>
\\ }
\\}
, &.{
.{ .label = "alpha", .kind = .Field, .detail = "alpha: u32" },
});
try testCompletion(
\\const S = struct { alpha: u32 };
\\fn foo(items: [2]S) void {
\\ for (items) |bar| {
\\ bar.<cursor>
\\ }
\\}
, &.{
.{ .label = "alpha", .kind = .Field, .detail = "alpha: u32" },
});
try testCompletion(
\\const S = struct { alpha: u32 };
\\fn foo(items: []S) void {
\\ for (items, items) |_, baz| {
\\ baz.<cursor>
\\ }
\\}
, &.{
.{ .label = "alpha", .kind = .Field, .detail = "alpha: u32" },
});
try testCompletion(
\\const S = struct { alpha: u32 };

View File

@ -41,10 +41,10 @@ fn testDefinition(source: []const u8) !void {
var cursor: offsets.Loc = .{ .start = 0, .end = 0 };
var def_start: offsets.Loc = .{ .start = 0, .end = 0 };
var def_end: offsets.Loc = .{ .start = 0, .end = 0 };
for (phr.locations.items(.old)) |loc, i| {
if (mem.eql(u8, source[loc.start..loc.end], "<>")) cursor = phr.locations.items(.new)[i];
if (mem.eql(u8, source[loc.start..loc.end], "<def>")) def_start = phr.locations.items(.new)[i];
if (mem.eql(u8, source[loc.start..loc.end], "</def>")) def_end = phr.locations.items(.new)[i];
for (phr.locations.items(.old), phr.locations.items(.new)) |old, new| {
if (mem.eql(u8, source[old.start..old.end], "<>")) cursor = new;
if (mem.eql(u8, source[old.start..old.end], "<def>")) def_start = new;
if (mem.eql(u8, source[old.start..old.end], "</def>")) def_end = new;
}
const cursor_lsp = offsets.locToRange(phr.new_source, cursor, .@"utf-16").start;

View File

@ -188,7 +188,7 @@ fn testReferences(source: []const u8) !void {
const actual_loc = offsets.rangeToLoc(phr.new_source, response_location.range, ctx.server.offset_encoding);
const index = found_index: {
for (expected_locs) |expected_loc, idx| {
for (expected_locs, 0..) |expected_loc, idx| {
if (expected_loc.start != actual_loc.start) continue;
if (expected_loc.end != actual_loc.end) continue;
break :found_index idx;

View File

@ -64,7 +64,7 @@ fn testSelectionRange(source: []const u8, want: []const []const u8) !void {
const last = got.pop();
try std.testing.expectEqualStrings(phr.new_source, last);
try std.testing.expectEqual(want.len, got.items.len);
for (want) |w, i| {
try std.testing.expectEqualStrings(w, got.items[i]);
for (want, got.items) |expected, actual| {
try std.testing.expectEqualStrings(expected, actual);
}
}

View File

@ -39,7 +39,7 @@ test "semantic tokens - string literals" {
\\"",//
\\"":
,
// no idea if this output is correct but at least it doesn't crash
// no idea if this output is correct but at least it doesn't crash
&.{ 1, 3, 3, 8, 0, 1, 0, 2, 4, 0, 0, 0, 2, 9, 0 },
);
}