Merge pull request #32 from alexnask/context_independent_field_completion
Trigger global and field completions correctly in more contexts.
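The core of the change: `PositionContext` becomes a tagged union whose `field_access` variant carries the offset at which the accessed expression starts on the line, so the completion handler can hand `completeFieldAccess` exactly the slice it needs to tokenize, independent of whatever text precedes it. A minimal sketch of that pattern, using current Zig syntax and illustrative names rather than the zls code itself:

    const std = @import("std");

    // Sketch only: a tagged union like the one this diff introduces; the
    // `field_access` payload records where the accessed expression begins.
    const PositionContext = union(enum) {
        builtin,
        var_access,
        field_access: usize,
        other,
    };

    // Hypothetical dispatcher mirroring the new switch on the context:
    // the payload picks the part of the line that gets tokenized.
    fn completionSlice(ctx: PositionContext, line: []const u8) []const u8 {
        return switch (ctx) {
            .field_access => |start| line[start..],
            else => line,
        };
    }

    pub fn main() void {
        const line = "    foo.bar.";
        // Prints "foo.bar.", the expression whose fields would be completed.
        std.debug.print("{s}\n", .{completionSlice(.{ .field_access = 4 }, line)});
    }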
commit 35a73fb7cb

src/main.zig (118 lines changed)
@@ -14,21 +14,28 @@ var allocator: *std.mem.Allocator = undefined;
 
 var document_store: DocumentStore = undefined;
 
-const initialize_response = \\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"workspace":{"workspaceFolders":{"supported":true}}}}}
+const initialize_response =
+    \\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"workspace":{"workspaceFolders":{"supported":true}}}}}
 ;
 
-const not_implemented_response = \\,"error":{"code":-32601,"message":"NotImplemented"}}
+const not_implemented_response =
+    \\,"error":{"code":-32601,"message":"NotImplemented"}}
 ;
 
-const null_result_response = \\,"result":null}
+const null_result_response =
+    \\,"result":null}
 ;
-const empty_result_response = \\,"result":{}}
+const empty_result_response =
+    \\,"result":{}}
 ;
-const empty_array_response = \\,"result":[]}
+const empty_array_response =
+    \\,"result":[]}
 ;
-const edit_not_applied_response = \\,"result":{"applied":false,"failureReason":"feature not implemented"}}
+const edit_not_applied_response =
+    \\,"result":{"applied":false,"failureReason":"feature not implemented"}}
 ;
-const no_completions_response = \\,"result":{"isIncomplete":false,"items":[]}}
+const no_completions_response =
+    \\,"result":{"isIncomplete":false,"items":[]}}
 ;
 
 /// Sends a request or response
@@ -54,7 +61,7 @@ fn log(comptime fmt: []const u8, args: var) !void {
             .LogMessageParams = .{
                 .@"type" = .Log,
                 .message = message,
-            }
+            },
         },
     });
 }
@@ -148,7 +155,7 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
                                 .severity = .Information,
                                 .code = "BadStyle",
                                 .source = "zls",
-                                .message = "Functions should be camelCase"
+                                .message = "Functions should be camelCase",
                             });
                         } else if (is_type_function and !analysis.isPascalCase(func_name)) {
                             try diagnostics.append(.{
@@ -156,12 +163,12 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
                                 .severity = .Information,
                                 .code = "BadStyle",
                                 .source = "zls",
-                                .message = "Type functions should be PascalCase"
+                                .message = "Type functions should be PascalCase",
                             });
                         }
                     }
                 },
-                else => {}
+                else => {},
             }
         }
     }
@@ -178,10 +185,13 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
 }
 
 fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *std.zig.ast.Node, config: Config) !?types.CompletionItem {
-    var doc = if (try analysis.getDocComments(alloc, tree, decl)) |doc_comments| types.MarkupContent{
-        .kind = .Markdown,
-        .value = doc_comments,
-    } else null;
+    var doc = if (try analysis.getDocComments(alloc, tree, decl)) |doc_comments|
+        types.MarkupContent{
+            .kind = .Markdown,
+            .value = doc_comments,
+        }
+    else
+        null;
 
     switch (decl.id) {
         .FnProto => {
@@ -217,7 +227,7 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *std.zig.ast.Node, config: Config) !?types.CompletionItem {
                 .kind = .Field,
                 .documentation = doc,
             };
-        }
+        },
     }
 
     return null;
@@ -242,7 +252,7 @@ fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void {
     }
 
     try send(types.Response{
-        .id = .{.Integer = id},
+        .id = .{ .Integer = id },
         .result = .{
             .CompletionList = .{
                 .isIncomplete = false,
@@ -252,13 +262,13 @@ fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void {
     });
 }
 
-fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, config: Config) !void {
+fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, line_start_idx: usize, config: Config) !void {
     var arena = std.heap.ArenaAllocator.init(allocator);
     defer arena.deinit();
 
     var analysis_ctx = (try document_store.analysisContext(handle, &arena)) orelse {
         return send(types.Response{
-            .id = .{.Integer = id},
+            .id = .{ .Integer = id },
             .result = .{
                 .CompletionList = .{
                     .isIncomplete = false,
@@ -272,7 +282,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, config: Config) !void {
     var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);
 
     var line = try handle.document.getLine(@intCast(usize, position.line));
-    var tokenizer = std.zig.Tokenizer.init(line);
+    var tokenizer = std.zig.Tokenizer.init(line[line_start_idx..]);
 
     if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer)) |node| {
         var index: usize = 0;
@@ -287,7 +297,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, config: Config) !void {
     }
 
     try send(types.Response{
-        .id = .{.Integer = id},
+        .id = .{ .Integer = id },
         .result = .{
             .CompletionList = .{
                 .isIncomplete = false,
@@ -327,19 +337,26 @@ const builtin_completions = block: {
         without_snippets[i].insertText = builtin[1..cutoff];
     }
 
-    break :block [2]CompletionList {
-        without_snippets, with_snippets
+    break :block [2]CompletionList{
+        without_snippets, with_snippets,
     };
 };
 
-const PositionContext = enum {
+const PositionContext = union(enum) {
     builtin,
     comment,
     string_literal,
-    field_access,
+    field_access: usize,
     var_access,
     other,
-    empty
+    empty,
+};
+
+const token_separators = [_]u8{
+    ' ', '\t', '(', ')', '[', ']',
+    '{', '}', '|', '=', '!', ';',
+    ',', '?', ':', '%', '+', '*',
+    '>', '<', '~', '-', '/', '&',
 };
 
 fn documentPositionContext(doc: types.TextDocument, pos_index: usize) PositionContext {
|
||||||
@ -351,10 +368,10 @@ fn documentPositionContext(doc: types.TextDocument, pos_index: usize) PositionCo
|
|||||||
|
|
||||||
var line = doc.text[curr_position .. pos_index + 1];
|
var line = doc.text[curr_position .. pos_index + 1];
|
||||||
// Strip any leading whitespace.
|
// Strip any leading whitespace.
|
||||||
curr_position = 0;
|
var skipped_ws: usize = 0;
|
||||||
while (curr_position < line.len and (line[curr_position] == ' ' or line[curr_position] == '\t')) : (curr_position += 1) {}
|
while (skipped_ws < line.len and (line[skipped_ws] == ' ' or line[skipped_ws] == '\t')) : (skipped_ws += 1) {}
|
||||||
if (curr_position >= line.len) return .empty;
|
if (skipped_ws >= line.len) return .empty;
|
||||||
line = line[curr_position .. ];
|
line = line[skipped_ws..];
|
||||||
|
|
||||||
// Quick exit for comment lines and multi line string literals.
|
// Quick exit for comment lines and multi line string literals.
|
||||||
if (line.len >= 2 and line[0] == '/' and line[1] == '/')
|
if (line.len >= 2 and line[0] == '/' and line[1] == '/')
|
||||||
@@ -367,6 +384,8 @@ fn documentPositionContext(doc: types.TextDocument, pos_index: usize) PositionContext {
     // Go over the current line character by character
     // and determine the context.
     curr_position = 0;
+    var expr_start: usize = skipped_ws;
+
     var new_token = true;
     var context: PositionContext = .other;
     var string_pop_ctx: PositionContext = .other;
@@ -375,6 +394,7 @@ fn documentPositionContext(doc: types.TextDocument, pos_index: usize) PositionContext {
         const next_char = if (curr_position < line.len - 1) line[curr_position + 1] else null;
 
         if (context != .string_literal and c == '"') {
+            expr_start = curr_position + skipped_ws;
             context = .string_literal;
             continue;
         }
@@ -397,7 +417,8 @@ fn documentPositionContext(doc: types.TextDocument, pos_index: usize) PositionContext {
             break;
         }
 
-        if (c == ' ' or c == '\t') {
+        if (std.mem.indexOfScalar(u8, &token_separators, c) != null) {
+            expr_start = curr_position + skipped_ws + 1;
             new_token = true;
             context = .other;
             continue;
@@ -405,12 +426,17 @@ fn documentPositionContext(doc: types.TextDocument, pos_index: usize) PositionContext {
 
         if (c == '.' and (!new_token or context == .string_literal)) {
            new_token = true;
-            context = .field_access;
+            if (next_char != null and next_char.? == '.') continue;
+            context = .{ .field_access = expr_start };
             continue;
         }
 
         if (new_token) {
-            const access_ctx: PositionContext = if (context == .field_access) .field_access else .var_access;
+            const access_ctx: PositionContext = if (context == .field_access)
+                .{ .field_access = expr_start }
+            else
+                .var_access;
+
             new_token = false;
 
             if (c == '_' or std.ascii.isAlpha(c)) {
||||||
@ -509,22 +535,19 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
|
|||||||
const pos_index = try handle.document.positionToIndex(pos);
|
const pos_index = try handle.document.positionToIndex(pos);
|
||||||
const pos_context = documentPositionContext(handle.document, pos_index);
|
const pos_context = documentPositionContext(handle.document, pos_index);
|
||||||
|
|
||||||
if (pos_context == .builtin) {
|
switch (pos_context) {
|
||||||
try send(types.Response{
|
.builtin => try send(types.Response{
|
||||||
.id = .{.Integer = id},
|
.id = .{ .Integer = id },
|
||||||
.result = .{
|
.result = .{
|
||||||
.CompletionList = .{
|
.CompletionList = .{
|
||||||
.isIncomplete = false,
|
.isIncomplete = false,
|
||||||
.items = builtin_completions[@boolToInt(config.enable_snippets)][0..],
|
.items = builtin_completions[@boolToInt(config.enable_snippets)][0..],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
});
|
}),
|
||||||
} else if (pos_context == .var_access or pos_context == .empty) {
|
.var_access, .empty => try completeGlobal(id, handle.*, config),
|
||||||
try completeGlobal(id, handle.*, config);
|
.field_access => |start_idx| try completeFieldAccess(id, handle, pos, start_idx, config),
|
||||||
} else if (pos_context == .field_access) {
|
else => try respondGeneric(id, no_completions_response),
|
||||||
try completeFieldAccess(id, handle, pos, config);
|
|
||||||
} else {
|
|
||||||
try respondGeneric(id, no_completions_response);
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
try respondGeneric(id, no_completions_response);
|
try respondGeneric(id, no_completions_response);
|
||||||
@@ -540,7 +563,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !void {
         // \\}]}}
         // );
         try respondGeneric(id,
             \\,"result":{"signatures":[]}}
         );
     } else if (root.Object.getValue("id")) |_| {
         try log("Method with return value not implemented: {}", .{method});
@@ -581,7 +604,7 @@ pub fn main() anyerror!void {
 
     // Read he configuration, if any.
     var config = Config{};
-    const config_parse_options = std.json.ParseOptions{ .allocator=allocator };
+    const config_parse_options = std.json.ParseOptions{ .allocator = allocator };
 
     // TODO: Investigate using std.fs.Watch to detect writes to the config and reload it.
     config_read: {
@@ -627,13 +650,13 @@ pub fn main() anyerror!void {
 
     stdin_poll: while (true) {
         if (offset >= 16 and std.mem.startsWith(u8, buffer.items, "Content-Length: ")) {
-
             index = 16;
             while (index <= offset + 10) : (index += 1) {
                 const c = buffer.items[index];
                 if (c >= '0' and c <= '9') {
                     content_len = content_len * 10 + (c - '0');
-                } if (c == '\r' and buffer.items[index + 1] == '\n') {
+                }
+                if (c == '\r' and buffer.items[index + 1] == '\n') {
                     index += 2;
                     break;
                 }
@@ -663,7 +686,6 @@ pub fn main() anyerror!void {
             } else {
                 try log("\\r not found", .{});
             }
-
         } else if (offset >= 16) {
            try log("Offset is greater than 16!", .{});
            return;