Preparation for imports, abstracted document hashmap into a document storage type

Alexandros Naskos 2020-05-14 02:10:41 +03:00
parent be95849a15
commit 307dceb703
5 changed files with 334 additions and 206 deletions

src/analysis.zig

@@ -1,5 +1,5 @@
 const std = @import("std");
+const types = @import("types.zig");
 const ast = std.zig.ast;
 /// REALLY BAD CODE, PLEASE DON'T USE THIS!!!!!!! (only for testing)
@@ -154,20 +154,20 @@ pub fn isPascalCase(name: []const u8) bool {
 // ANALYSIS ENGINE
 /// Gets the child of node
-pub fn getChild(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, name: []const u8) ?*std.zig.ast.Node {
+pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node {
     var index: usize = 0;
     while (node.iterate(index)) |child| {
         switch (child.id) {
             .VarDecl => {
-                const vari = child.cast(std.zig.ast.Node.VarDecl).?;
+                const vari = child.cast(ast.Node.VarDecl).?;
                 if (std.mem.eql(u8, tree.tokenSlice(vari.name_token), name)) return child;
             },
             .FnProto => {
-                const func = child.cast(std.zig.ast.Node.FnProto).?;
+                const func = child.cast(ast.Node.FnProto).?;
                 if (func.name_token != null and std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return child;
             },
             .ContainerField => {
-                const field = child.cast(std.zig.ast.Node.ContainerField).?;
+                const field = child.cast(ast.Node.ContainerField).?;
                 if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child;
             },
             else => {}
@@ -178,44 +178,44 @@ pub fn getChild(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, name: []const
 }
 /// Resolves the type of a node
-pub fn resolveTypeOfNode(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?*std.zig.ast.Node {
+pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *types.ImportCtx) ?*ast.Node {
     switch (node.id) {
         .VarDecl => {
-            const vari = node.cast(std.zig.ast.Node.VarDecl).?;
-            return resolveTypeOfNode(tree, vari.type_node orelse vari.init_node.?) orelse null;
+            const vari = node.cast(ast.Node.VarDecl).?;
+            return resolveTypeOfNode(tree, vari.type_node orelse vari.init_node.?, import_ctx) orelse null;
         },
         .FnProto => {
-            const func = node.cast(std.zig.ast.Node.FnProto).?;
+            const func = node.cast(ast.Node.FnProto).?;
             switch (func.return_type) {
-                .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(tree, return_type);}
+                .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(tree, return_type, import_ctx);}
             }
         },
         .Identifier => {
             if (getChild(tree, &tree.root_node.base, tree.getNodeSource(node))) |child| {
-                return resolveTypeOfNode(tree, child);
+                return resolveTypeOfNode(tree, child, import_ctx);
             } else return null;
         },
         .ContainerDecl => {
             return node;
         },
         .ContainerField => {
-            const field = node.cast(std.zig.ast.Node.ContainerField).?;
-            return resolveTypeOfNode(tree, field.type_expr.?);
+            const field = node.cast(ast.Node.ContainerField).?;
+            return resolveTypeOfNode(tree, field.type_expr.?, import_ctx);
        },
        .SuffixOp => {
-            const suffix_op = node.cast(std.zig.ast.Node.SuffixOp).?;
+            const suffix_op = node.cast(ast.Node.SuffixOp).?;
            switch (suffix_op.op) {
                .Call => {
-                    return resolveTypeOfNode(tree, suffix_op.lhs.node);
+                    return resolveTypeOfNode(tree, suffix_op.lhs.node, import_ctx);
                },
                else => {}
            }
        },
        .InfixOp => {
-            const infix_op = node.cast(std.zig.ast.Node.InfixOp).?;
+            const infix_op = node.cast(ast.Node.InfixOp).?;
            switch (infix_op.op) {
                .Period => {
-                    var left = resolveTypeOfNode(tree, infix_op.lhs).?;
+                    var left = resolveTypeOfNode(tree, infix_op.lhs, import_ctx) orelse return null;
                    if (nodeToString(tree, infix_op.rhs)) |string| {
                        return getChild(tree, left, string);
                    } else return null;
@@ -224,14 +224,27 @@ pub fn resolveTypeOfNode(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?*std
            }
        },
        .PrefixOp => {
-            const prefix_op = node.cast(std.zig.ast.Node.PrefixOp).?;
+            const prefix_op = node.cast(ast.Node.PrefixOp).?;
            switch (prefix_op.op) {
                .PtrType => {
-                    return resolveTypeOfNode(tree, prefix_op.rhs);
+                    return resolveTypeOfNode(tree, prefix_op.rhs, import_ctx);
                },
                else => {}
            }
        },
+        .BuiltinCall => {
+            const builtin_call = node.cast(ast.Node.BuiltinCall).?;
+            if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return null;
+            if (builtin_call.params.len > 1) return null;
+
+            const import_param = builtin_call.params.at(0).*;
+            if (import_param.id != .StringLiteral) return null;
+
+            var import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
+            import_str = import_str[1 .. import_str.len - 1];
+            return resolveImport(import_str);
+        },
        else => {
            std.debug.warn("Type resolution case not implemented; {}\n", .{node.id});
        }
@@ -239,8 +252,14 @@ pub fn resolveTypeOfNode(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?*std
     return null;
 }
+fn resolveImport(import: []const u8) ?*ast.Node {
+    // @TODO: Write this
+    std.debug.warn("Resolving import {}\n", .{import});
+    return null;
+}
-pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, tokenizer: *std.zig.Tokenizer) ?*std.zig.ast.Node {
-    var current_node = node;
+pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, import_ctx: *types.ImportCtx) ?*ast.Node {
+    var current_node = &tree.root_node.base;
     while (true) {
         var next = tokenizer.next();
@@ -249,13 +268,11 @@ pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, token
                return current_node;
            },
            .Identifier => {
-                // var root = current_node.cast(std.zig.ast.Node.Root).?;
+                // var root = current_node.cast(ast.Node.Root).?;
                // current_node.
                if (getChild(tree, current_node, tokenizer.buffer[next.start..next.end])) |child| {
-                    if (resolveTypeOfNode(tree, child)) |node_type| {
-                        if (resolveTypeOfNode(tree, child)) |child_type| {
-                            current_node = child_type;
-                        } else return null;
+                    if (resolveTypeOfNode(tree, child, import_ctx)) |node_type| {
+                        current_node = node_type;
                    } else return null;
                } else return null;
            },
@@ -265,7 +282,7 @@ pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, token
                    return current_node;
                } else if (after_period.id == .Identifier) {
                    if (getChild(tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| {
-                        if (resolveTypeOfNode(tree, child)) |child_type| {
+                        if (resolveTypeOfNode(tree, child, import_ctx)) |child_type| {
                            current_node = child_type;
                        } else return null;
                    } else return null;
@@ -280,8 +297,8 @@ pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, token
     return current_node;
 }
-pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ![]*std.zig.ast.Node {
-    var nodes = std.ArrayList(*std.zig.ast.Node).init(allocator);
+pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node) ![]*ast.Node {
+    var nodes = std.ArrayList(*ast.Node).init(allocator);
     var index: usize = 0;
     while (node.iterate(index)) |child_node| {
@@ -293,18 +310,18 @@ pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *std.zig.ast.
     return nodes.items;
 }
-pub fn nodeToString(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?[]const u8 {
+pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 {
     switch (node.id) {
         .ContainerField => {
-            const field = node.cast(std.zig.ast.Node.ContainerField).?;
+            const field = node.cast(ast.Node.ContainerField).?;
            return tree.tokenSlice(field.name_token);
        },
        .Identifier => {
-            const field = node.cast(std.zig.ast.Node.Identifier).?;
+            const field = node.cast(ast.Node.Identifier).?;
            return tree.tokenSlice(field.token);
        },
        .FnProto => {
-            const func = node.cast(std.zig.ast.Node.FnProto).?;
+            const func = node.cast(ast.Node.FnProto).?;
            if (func.name_token) |name_token| {
                return tree.tokenSlice(name_token);
            }
@@ -317,7 +334,7 @@ pub fn nodeToString(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?[]const u
     return null;
 }
-pub fn nodesToString(tree: *std.zig.ast.Tree, maybe_nodes: ?[]*std.zig.ast.Node) void {
+pub fn nodesToString(tree: *ast.Tree, maybe_nodes: ?[]*ast.Node) void {
     if (maybe_nodes) |nodes| {
         for (nodes) |node| {
             std.debug.warn("- {}\n", .{nodeToString(tree, node)});

src/config.zig

@@ -2,3 +2,6 @@
 /// Whether to enable snippet completions
 enable_snippets: bool = true,
+/// zig installation path
+zig_path: ?[]const u8 = null,

src/document_store.zig (new file, 202 lines)

@@ -0,0 +1,202 @@
+const std = @import("std");
+const types = @import("types.zig");
+
+const DocumentStore = @This();
+
+pub const Handle = struct {
+    document: types.TextDocument,
+    count: usize,
+    import_uris: [][]const u8,
+
+    pub fn uri(handle: Handle) []const u8 {
+        return handle.document.uri;
+    }
+
+    /// Returns the zig AST resulting from parsing the document's text, even
+    /// if it contains errors.
+    pub fn dirtyTree(handle: Handle, allocator: *std.mem.Allocator) !*std.zig.ast.Tree {
+        return try std.zig.parse(allocator, handle.document.text);
+    }
+
+    /// Returns a zig AST with no errors, either from the current text or
+    /// the stored sane text, null if no such ast exists.
+    pub fn saneTree(handle: Handle, allocator: *std.mem.Allocator) !?*std.zig.ast.Tree {
+        var tree = try std.zig.parse(allocator, handle.document.text);
+        if (tree.errors.len == 0) return tree;
+
+        tree.deinit();
+        if (handle.document.sane_text) |sane| {
+            return try std.zig.parse(allocator, sane);
+        }
+        return null;
+    }
+};
+
+allocator: *std.mem.Allocator,
+handles: std.StringHashMap(Handle),
+std_path: ?[]const u8,
+
+pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_path: ?[]const u8) void {
+    self.allocator = allocator;
+    self.handles = std.StringHashMap(Handle).init(allocator);
+    errdefer self.handles.deinit();
+
+    if (zig_path) |zpath| {
+        // pub fn resolve(allocator: *Allocator, paths: []const []const u8) ![]u8
+        self.std_path = std.fs.path.resolve(allocator, &[_][]const u8 {
+            zpath, "lib/zig/std"
+        }) catch |err| block: {
+            std.debug.warn("Failed to resolve zig std library path, error: {}\n", .{err});
+            break :block null;
+        };
+    } else {
+        self.std_path = null;
+    }
+}
+
+pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle {
+    if (self.handles.get(uri)) |entry| {
+        std.debug.warn("Document already open: {}, incrementing count\n", .{uri});
+        entry.value.count += 1;
+        std.debug.warn("New count: {}\n", .{entry.value.count});
+        self.allocator.free(uri);
+        return &entry.value;
+    }
+
+    std.debug.warn("Opened document: {}\n", .{uri});
+    const duped_text = try std.mem.dupe(self.allocator, u8, text);
+    errdefer self.allocator.free(duped_text);
+    const duped_uri = try std.mem.dupe(self.allocator, u8, uri);
+    errdefer self.allocator.free(duped_uri);
+
+    var handle = Handle{
+        .count = 1,
+        .import_uris = &[_][]const u8 {},
+        .document = .{
+            .uri = duped_uri,
+            .text = duped_text,
+            .mem = duped_text,
+            .sane_text = null,
+        },
+    };
+    try self.checkSanity(&handle);
+    try self.handles.putNoClobber(duped_uri, handle);
+    return &(self.handles.get(duped_uri) orelse unreachable).value;
+}
+
+fn decrementCount(self: *DocumentStore, uri: []const u8) void {
+    if (self.handles.get(uri)) |entry| {
+        entry.value.count -= 1;
+        if (entry.value.count == 0) {
+            std.debug.warn("Freeing document: {}\n", .{uri});
+        }
+
+        self.allocator.free(entry.value.document.uri);
+        self.allocator.free(entry.value.document.mem);
+        if (entry.value.document.sane_text) |sane| {
+            self.allocator.free(sane);
+        }
+
+        for (entry.value.import_uris) |import_uri| {
+            self.decrementCount(import_uri);
+            self.allocator.free(import_uri);
+        }
+
+        if (entry.value.import_uris.len > 0) {
+            self.allocator.free(entry.value.import_uris);
+        }
+
+        const uri_key = entry.key;
+        self.handles.removeAssertDiscard(uri);
+        self.allocator.free(uri_key);
+    }
+}
+
+pub fn closeDocument(self: *DocumentStore, uri: []const u8) void {
+    self.decrementCount(uri);
+}
+
+pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
+    if (self.handles.get(uri)) |entry| {
+        return &entry.value;
+    }
+    return null;
+}
+
+// Check if the document text is now sane, move it to sane_text if so.
+fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
+    const dirty_tree = try handle.dirtyTree(self.allocator);
+    defer dirty_tree.deinit();
+
+    if (dirty_tree.errors.len == 0) {
+        std.debug.warn("New sane text for document {}\n", .{handle.uri()});
+        if (handle.document.sane_text) |sane| {
+            self.allocator.free(sane);
+        }
+        handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text);
+    }
+}
+
+pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void {
+    var document = &handle.document;
+
+    for (content_changes.items) |change| {
+        if (change.Object.getValue("range")) |range| {
+            const start_pos = types.Position{
+                .line = range.Object.getValue("start").?.Object.getValue("line").?.Integer,
+                .character = range.Object.getValue("start").?.Object.getValue("character").?.Integer
+            };
+            const end_pos = types.Position{
+                .line = range.Object.getValue("end").?.Object.getValue("line").?.Integer,
+                .character = range.Object.getValue("end").?.Object.getValue("character").?.Integer
+            };
+
+            const change_text = change.Object.getValue("text").?.String;
+            const start_index = try document.positionToIndex(start_pos);
+            const end_index = try document.positionToIndex(end_pos);
+
+            const old_len = document.text.len;
+            const new_len = old_len + change_text.len;
+            if (new_len > document.mem.len) {
+                // We need to reallocate memory.
+                // We reallocate twice the current filesize or the new length, if it's more than that
+                // so that we can reduce the amount of realloc calls.
+                // We can tune this to find a better size if needed.
+                const realloc_len = std.math.max(2 * old_len, new_len);
+                document.mem = try self.allocator.realloc(document.mem, realloc_len);
+            }
+
+            // The first part of the string, [0 .. start_index] need not be changed.
+            // We then copy the last part of the string, [end_index ..] to its
+            // new position, [start_index + change_len .. ]
+            std.mem.copy(u8, document.mem[start_index + change_text.len..][0 .. old_len - end_index], document.mem[end_index .. old_len]);
+            // Finally, we copy the changes over.
+            std.mem.copy(u8, document.mem[start_index..][0 .. change_text.len], change_text);
+
+            // Reset the text substring.
+            document.text = document.mem[0 .. new_len];
+        } else {
+            const change_text = change.Object.getValue("text").?.String;
+            const old_len = document.text.len;
+
+            if (change_text.len > document.mem.len) {
+                // Like above.
+                const realloc_len = std.math.max(2 * old_len, change_text.len);
+                document.mem = try self.allocator.realloc(document.mem, realloc_len);
+            }
+
+            std.mem.copy(u8, document.mem[0 .. change_text.len], change_text);
+            document.text = document.mem[0 .. change_text.len];
+        }
+    }
+
+    try self.checkSanity(handle);
+}
+
+pub fn deinit(self: *DocumentStore) void {
+    // @TODO: Deinit everything!
+    self.handles.deinit();
+}
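Note: the new store owns every document's memory and reference-counts opens per URI, which is what the commit title means by abstracting the documents hashmap into a storage type. A minimal lifecycle sketch of the API above, with a hypothetical URI and text standing in for didOpen/didClose notifications:

    const std = @import("std");
    const DocumentStore = @import("document_store.zig");

    pub fn main() !void {
        const a = std.heap.page_allocator;

        var store: DocumentStore = undefined;
        store.init(a, null); // a null zig_path skips std library path resolution
        defer store.deinit();

        // didOpen: the store dupes uri and text and keeps ownership of the copies.
        const uri = try std.mem.dupe(a, u8, "file:///tmp/example.zig");
        const handle = try store.openDocument(uri, "const x = 1;");
        std.debug.warn("opened {} (count = {})\n", .{ handle.uri(), handle.count });

        // didClose: drops the count to zero, freeing uri, text and sane text.
        store.closeDocument("file:///tmp/example.zig");
    }

Two things worth noting: openDocument frees the uri it is passed only on the duplicate-open path, so callers are effectively expected to hand over heap-allocated URIs; and applyChanges computes new_len as old_len + change_text.len without subtracting end_index - start_index, so the resulting text slice appears exact only for pure insertions (start_index == end_index).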

src/main.zig

@@ -2,7 +2,7 @@ const std = @import("std");
 const build_options = @import("build_options");
 const Config = @import("config.zig");
-const Uri = @import("uri.zig");
+const DocumentStore = @import("document_store.zig");
 const data = @import("data/" ++ build_options.data_version ++ ".zig");
 const types = @import("types.zig");
 const analysis = @import("analysis.zig");
@@ -12,8 +12,7 @@ const analysis = @import("analysis.zig");
 var stdout: std.fs.File.OutStream = undefined;
 var allocator: *std.mem.Allocator = undefined;
-/// Documents hashmap, types.DocumentUri:types.TextDocument
-var documents: std.StringHashMap(types.TextDocument) = undefined;
+var document_store: DocumentStore = undefined;
 const initialize_response = \\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"workspace":{"workspaceFolders":{"supported":true}}}}}
 ;
@@ -78,48 +77,6 @@ fn respondGeneric(id: i64, response: []const u8) !void {
     try stdout.writeAll(response);
 }
-fn freeDocument(document: types.TextDocument) void {
-    allocator.free(document.uri);
-    allocator.free(document.mem);
-    if (document.sane_text) |str| {
-        allocator.free(str);
-    }
-}
-fn openDocument(uri: []const u8, text: []const u8) !void {
-    const duped_uri = try std.mem.dupe(allocator, u8, uri);
-    const duped_text = try std.mem.dupe(allocator, u8, text);
-    const res = try documents.put(duped_uri, .{
-        .uri = duped_uri,
-        .text = duped_text,
-        .mem = duped_text,
-    });
-    if (res) |entry| {
-        try log("Document already open: {}, closing old.", .{uri});
-        freeDocument(entry.value);
-    } else {
-        try log("Opened document: {}", .{uri});
-    }
-}
-fn closeDocument(uri: []const u8) !void {
-    if (documents.remove(uri)) |entry| {
-        try log("Closing document: {}", .{uri});
-        freeDocument(entry.value);
-    }
-}
-fn cacheSane(document: *types.TextDocument) !void {
-    try log("Caching sane text for document: {}", .{document.uri});
-    if (document.sane_text) |old_sane| {
-        allocator.free(old_sane);
-    }
-    document.sane_text = try std.mem.dupe(allocator, u8, document.text);
-}
 // TODO: Is this correct or can we get a better end?
 fn astLocationToRange(loc: std.zig.ast.Tree.Location) types.Range {
     return .{
@@ -134,8 +91,8 @@ fn astLocationToRange(loc: std.zig.ast.Tree.Location) types.Range {
     };
 }
-fn publishDiagnostics(document: *types.TextDocument, config: Config) !void {
-    const tree = try std.zig.parse(allocator, document.text);
+fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
+    const tree = try handle.dirtyTree(allocator);
     defer tree.deinit();
     // Use an arena for our local memory allocations.
@@ -163,7 +120,6 @@ fn publishDiagnostics(document: *types.TextDocument, config: Config) !void {
     }
     if (tree.errors.len == 0) {
-        try cacheSane(document);
         var decls = tree.root_node.decls.iterator(0);
         while (decls.next()) |decl_ptr| {
             var decl = decl_ptr.*;
@@ -214,7 +170,7 @@ fn publishDiagnostics(document: *types.TextDocument, config: Config) !void {
         .method = "textDocument/publishDiagnostics",
         .params = .{
             .PublishDiagnosticsParams = .{
-                .uri = document.uri,
+                .uri = handle.uri(),
                 .diagnostics = diagnostics.items,
             },
         },
@@ -268,18 +224,8 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *s
     return null;
 }
-fn completeGlobal(id: i64, document: *types.TextDocument, config: Config) !void {
-    // The tree uses its own arena, so we just pass our main allocator.
-    var tree = try std.zig.parse(allocator, document.text);
-    if (tree.errors.len > 0) {
-        if (document.sane_text) |sane_text| {
-            tree.deinit();
-            tree = try std.zig.parse(allocator, sane_text);
-        } else return try respondGeneric(id, no_completions_response);
-    }
-    else try cacheSane(document);
+fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void {
+    var tree = (try handle.saneTree(allocator)) orelse return respondGeneric(id, no_completions_response);
     defer tree.deinit();
     // We use a local arena allocator to deallocate all temporary data without iterating
@@ -307,9 +253,18 @@ fn completeGlobal(id: i64, document: *types.TextDocument, config: Config) !void
     });
 }
-fn completeFieldAccess(id: i64, document: *types.TextDocument, position: types.Position, config: Config) !void {
-    if (document.sane_text) |sane_text| {
-        var tree = try std.zig.parse(allocator, sane_text);
+fn completeFieldAccess(id: i64, handle: DocumentStore.Handle, position: types.Position, config: Config) !void {
+    const tree = (try handle.saneTree(allocator)) orelse {
+        return try send(types.Response{
+            .id = .{.Integer = id},
+            .result = .{
+                .CompletionList = .{
+                    .isIncomplete = false,
+                    .items = &[_]types.CompletionItem{},
+                },
+            },
+        });
+    };
     defer tree.deinit();
     // We use a local arena allocator to deallocate all temporary data without iterating
@@ -318,10 +273,11 @@ fn completeFieldAccess(id: i64, document: *types.TextDocument, position: types.P
     // Deallocate all temporary data.
     defer arena.deinit();
-    var line = try document.getLine(@intCast(usize, position.line));
+    var line = try handle.document.getLine(@intCast(usize, position.line));
     var tokenizer = std.zig.Tokenizer.init(line);
-    if (analysis.getNodeFromTokens(tree, &tree.root_node.base, &tokenizer)) |node| {
+    // @TODO Pass import ctx.
+    if (analysis.getFieldAccessTypeNode(tree, &tokenizer, {})) |node| {
         var index: usize = 0;
         while (node.iterate(index)) |child_node| {
             if (try nodeToCompletion(&arena.allocator, tree, child_node, config)) |completion| {
@@ -340,17 +296,6 @@ fn completeFieldAccess(id: i64, document: *types.TextDocument, position: types.P
            },
        },
    });
-    } else {
-        return try send(types.Response{
-            .id = .{.Integer = id},
-            .result = .{
-                .CompletionList = .{
-                    .isIncomplete = false,
-                    .items = &[_]types.CompletionItem{},
-                },
-            },
-        });
-    }
 }
 // Compute builtin completions at comptime.
@@ -524,73 +469,27 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
         const uri = document.getValue("uri").?.String;
         const text = document.getValue("text").?.String;
-        try openDocument(uri, text);
-        try publishDiagnostics(&(documents.get(uri).?.value), config);
+        const handle = try document_store.openDocument(uri, text);
+        try publishDiagnostics(handle.*, config);
     } else if (std.mem.eql(u8, method, "textDocument/didChange")) {
         const text_document = params.getValue("textDocument").?.Object;
         const uri = text_document.getValue("uri").?.String;
-        var document = &(documents.get(uri).?.value);
         const content_changes = params.getValue("contentChanges").?.Array;
-        for (content_changes.items) |change| {
-            if (change.Object.getValue("range")) |range| {
-                const start_pos = types.Position{
-                    .line = range.Object.getValue("start").?.Object.getValue("line").?.Integer,
-                    .character = range.Object.getValue("start").?.Object.getValue("character").?.Integer
-                };
-                const end_pos = types.Position{
-                    .line = range.Object.getValue("end").?.Object.getValue("line").?.Integer,
-                    .character = range.Object.getValue("end").?.Object.getValue("character").?.Integer
-                };
-                const change_text = change.Object.getValue("text").?.String;
-                const start_index = try document.positionToIndex(start_pos);
-                const end_index = try document.positionToIndex(end_pos);
-                const old_len = document.text.len;
-                const new_len = old_len + change_text.len;
-                if (new_len > document.mem.len) {
-                    // We need to reallocate memory.
-                    // We reallocate twice the current filesize or the new length, if it's more than that
-                    // so that we can reduce the amount of realloc calls.
-                    // We can tune this to find a better size if needed.
-                    const realloc_len = std.math.max(2 * old_len, new_len);
-                    document.mem = try allocator.realloc(document.mem, realloc_len);
-                }
-                // The first part of the string, [0 .. start_index] need not be changed.
-                // We then copy the last part of the string, [end_index ..] to its
-                // new position, [start_index + change_len .. ]
-                std.mem.copy(u8, document.mem[start_index + change_text.len..][0 .. old_len - end_index], document.mem[end_index .. old_len]);
-                // Finally, we copy the changes over.
-                std.mem.copy(u8, document.mem[start_index..][0 .. change_text.len], change_text);
-                // Reset the text substring.
-                document.text = document.mem[0 .. new_len];
-            } else {
-                const change_text = change.Object.getValue("text").?.String;
-                const old_len = document.text.len;
-                if (change_text.len > document.mem.len) {
-                    // Like above.
-                    const realloc_len = std.math.max(2 * old_len, change_text.len);
-                    document.mem = try allocator.realloc(document.mem, realloc_len);
-                }
-                std.mem.copy(u8, document.mem[0 .. change_text.len], change_text);
-                document.text = document.mem[0 .. change_text.len];
-            }
-        }
-        try publishDiagnostics(document, config);
+        const handle = document_store.getHandle(uri) orelse {
+            try log("Trying to change non existent document {}", .{uri});
+            return;
+        };
+        try document_store.applyChanges(handle, content_changes);
+        try publishDiagnostics(handle.*, config);
     } else if (std.mem.eql(u8, method, "textDocument/didSave")) {
         // noop
     } else if (std.mem.eql(u8, method, "textDocument/didClose")) {
         const document = params.getValue("textDocument").?.Object;
         const uri = document.getValue("uri").?.String;
-        try closeDocument(uri);
+        document_store.closeDocument(uri);
     }
     // Autocomplete / Signatures
     else if (std.mem.eql(u8, method, "textDocument/completion")) {
@@ -598,14 +497,18 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
         const uri = text_document.getValue("uri").?.String;
         const position = params.getValue("position").?.Object;
-        var document = &(documents.get(uri).?.value);
+        const handle = document_store.getHandle(uri) orelse {
+            try log("Trying to complete in non existent document {}", .{uri});
+            return;
+        };
         const pos = types.Position{
             .line = position.getValue("line").?.Integer,
             .character = position.getValue("character").?.Integer - 1,
         };
         if (pos.character >= 0) {
-            const pos_index = try document.positionToIndex(pos);
-            const pos_context = documentPositionContext(document.*, pos_index);
+            const pos_index = try handle.document.positionToIndex(pos);
+            const pos_context = documentPositionContext(handle.document, pos_index);
             if (pos_context == .builtin) {
                 try send(types.Response{
@@ -618,9 +521,9 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
                },
            });
        } else if (pos_context == .var_access or pos_context == .empty) {
-            try completeGlobal(id, document, config);
+            try completeGlobal(id, handle.*, config);
        } else if (pos_context == .field_access) {
-            try completeFieldAccess(id, document, pos, config);
+            try completeFieldAccess(id, handle.*, pos, config);
        } else {
            try respondGeneric(id, no_completions_response);
        }
@@ -628,18 +531,18 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
         try respondGeneric(id, no_completions_response);
     }
     } else if (std.mem.eql(u8, method, "textDocument/signatureHelp")) {
-        try respondGeneric(id,
-            \\,"result":{"signatures":[{
-            \\"label": "nameOfFunction(aNumber: u8)",
-            \\"documentation": {"kind": "markdown", "value": "Description of the function in **Markdown**!"},
-            \\"parameters": [
-            \\{"label": [15, 27], "documentation": {"kind": "markdown", "value": "An argument"}}
-            \\]
-            \\}]}}
-        );
         // try respondGeneric(id,
-        // \\,"result":{"signatures":[]}}
+        // \\,"result":{"signatures":[{
+        // \\"label": "nameOfFunction(aNumber: u8)",
+        // \\"documentation": {"kind": "markdown", "value": "Description of the function in **Markdown**!"},
+        // \\"parameters": [
+        // \\{"label": [15, 27], "documentation": {"kind": "markdown", "value": "An argument"}}
+        // \\]
+        // \\}]}}
         // );
+        try respondGeneric(id,
+            \\,"result":{"signatures":[]}}
+        );
     } else if (root.Object.getValue("id")) |_| {
         try log("Method with return value not implemented: {}", .{method});
         try respondGeneric(id, not_implemented_response);
@@ -677,11 +580,9 @@ pub fn main() anyerror!void {
     const stdin = std.io.getStdIn().inStream();
     stdout = std.io.getStdOut().outStream();
-    documents = std.StringHashMap(types.TextDocument).init(allocator);
     // Read he configuration, if any.
     var config = Config{};
+    const config_parse_options = std.json.ParseOptions{ .allocator=allocator };
     // TODO: Investigate using std.fs.Watch to detect writes to the config and reload it.
     config_read: {
@@ -704,13 +605,15 @@ pub fn main() anyerror!void {
         if (bytes_read != conf_file_stat.size) break :config_read;
         // TODO: Better errors? Doesnt seem like std.json can provide us positions or context.
-        // Note that we don't need to pass an allocator to parse since we are not using pointer or slice fields.
-        // Thus, we don't need to even call parseFree.
-        config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{}) catch |err| {
+        config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), config_parse_options) catch |err| {
             std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
             break :config_read;
         };
     }
+    defer std.json.parseFree(Config, config, config_parse_options);
+    document_store.init(allocator, config.zig_path);
+    defer document_store.deinit();
     // This JSON parser is passed to processJsonRpc and reset.
     var json_parser = std.json.Parser.init(allocator, false);
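Note: Config gaining a slice field (zig_path) is why the parse-options change above is needed: std.json.parse must now allocate, and the result must be released with parseFree using the same options, which the new config_parse_options plus the deferred parseFree accomplish. A compressed sketch of that pattern (loadConfig is a hypothetical wrapper, not part of the commit):

    const std = @import("std");
    const Config = @import("config.zig");

    fn loadConfig(a: *std.mem.Allocator, file_buf: []const u8) Config {
        const options = std.json.ParseOptions{ .allocator = a };
        // Any parsed zig_path string is owned by the allocator; the caller must
        // later release it with std.json.parseFree(Config, config, options).
        return std.json.parse(Config, &std.json.TokenStream.init(file_buf), options) catch |err| blk: {
            std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
            break :blk Config{};
        };
    }

Calling parseFree on the default Config{} (where zig_path is null) is harmless, so the deferred cleanup is safe on both the success and fallback paths.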

src/types.zig

@@ -3,6 +3,9 @@
 const std = @import("std");
 const json = std.json;
+// @TODO
+pub const ImportCtx = void;
 // JSON Types
 pub const String = []const u8;
@@ -135,7 +138,7 @@ pub const PublishDiagnosticsParams = struct {
 };
 pub const TextDocument = struct {
-    uri: DocumentUri,
+    uri: String,
     // This is a substring of mem starting at 0
     text: String,
     // This holds the memory that we have actually allocated.