First draft of imports

Alexandros Naskos 2020-05-14 04:54:05 +03:00
parent 307dceb703
commit 31f1d2fa3e
6 changed files with 183 additions and 44 deletions

View File

@@ -1,5 +1,5 @@
const std = @import("std");
-const types = @import("types.zig");
+const ImportContext = @import("document_store.zig").ImportContext;
const ast = std.zig.ast;
/// REALLY BAD CODE, PLEASE DON'T USE THIS!!!!!!! (only for testing)
@@ -178,7 +178,7 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node {
}
/// Resolves the type of a node
-pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *types.ImportCtx) ?*ast.Node {
+pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportContext) ?*ast.Node {
switch (node.id) {
.VarDecl => {
const vari = node.cast(ast.Node.VarDecl).?;
@@ -240,10 +240,9 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *types.Im
const import_param = builtin_call.params.at(0).*;
if (import_param.id != .StringLiteral) return null;
-var import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
-import_str = import_str[1 .. import_str.len - 1];
-return resolveImport(import_str);
+const import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
+// @TODO: Handle error better.
+return (import_ctx.onImport(import_str[1 .. import_str.len - 1]) catch unreachable);
},
else => {
std.debug.warn("Type resolution case not implemented; {}\n", .{node.id});
@@ -252,13 +251,7 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *types.Im
return null;
}
-fn resolveImport(import: []const u8) ?*ast.Node {
-// @TODO: Write this
-std.debug.warn("Resolving import {}\n", .{import});
-return null;
-}
-pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, import_ctx: *types.ImportCtx) ?*ast.Node {
+pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, import_ctx: *ImportContext) ?*ast.Node {
var current_node = &tree.root_node.base;
while (true) {
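
A note on the analysis.zig change above: the parameter of @import arrives as a raw StringLiteral token, quotes included, which is why onImport receives import_str[1 .. import_str.len - 1]. A tiny self-contained illustration of that trim (not part of the commit):

const std = @import("std");

test "an @import string literal token keeps its quotes" {
    // tree.tokenSlice returns the raw source text of the parameter, quotes and
    // all, so "std" arrives as "\"std\"" and is trimmed before onImport is called.
    const token_slice = "\"std\"";
    const import_name = token_slice[1 .. token_slice.len - 1];
    std.debug.assert(std.mem.eql(u8, import_name, "std"));
}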

View File

@@ -4,4 +4,4 @@
enable_snippets: bool = true,
/// zig installation path
-zig_path: ?[]const u8 = null,
+zig_lib_path: ?[]const u8 = null,
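
A note on the renamed option: zig_lib_path points at Zig's lib directory itself rather than the installation root. The old code resolved zig_path plus "lib/zig/std", while DocumentStore.init (next file) resolves zig_lib_path plus "./std/std.zig", so an equivalent value is the old path with lib/zig appended. A small illustration with a made-up prefix (not part of the commit):

const std = @import("std");

test "the new zig_lib_path sits under the old zig_path" {
    // Hypothetical install prefix; only the relationship between the two matters.
    const old_zig_path = "/usr/local/zig";
    const new_zig_lib_path = old_zig_path ++ "/lib/zig";
    // Both configurations end up locating <prefix>/lib/zig/std/std.zig.
    std.debug.assert(std.mem.eql(u8, new_zig_lib_path, "/usr/local/zig/lib/zig"));
}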

View File

@@ -1,12 +1,13 @@
const std = @import("std");
const types = @import("types.zig");
+const URI = @import("uri.zig");
const DocumentStore = @This();
pub const Handle = struct {
document: types.TextDocument,
count: usize,
-import_uris: [][]const u8,
+import_uris: std.ArrayList([]const u8),
pub fn uri(handle: Handle) []const u8 {
return handle.document.uri;
@@ -34,32 +35,37 @@ pub const Handle = struct {
allocator: *std.mem.Allocator,
handles: std.StringHashMap(Handle),
-std_path: ?[]const u8,
+std_uri: ?[]const u8,
-pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_path: ?[]const u8) void {
+pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !void {
self.allocator = allocator;
self.handles = std.StringHashMap(Handle).init(allocator);
errdefer self.handles.deinit();
-if (zig_path) |zpath| {
-// pub fn resolve(allocator: *Allocator, paths: []const []const u8) ![]u8
-self.std_path = std.fs.path.resolve(allocator, &[_][]const u8 {
-zpath, "lib/zig/std"
+if (zig_lib_path) |zpath| {
+const std_path = std.fs.path.resolve(allocator, &[_][]const u8 {
+zpath, "./std/std.zig"
}) catch |err| block: {
std.debug.warn("Failed to resolve zig std library path, error: {}\n", .{err});
-break :block null;
+self.std_uri = null;
+return;
};
+defer allocator.free(std_path);
+// Get the std_path as a URI, so we can just append to it!
+self.std_uri = try URI.fromPath(allocator, std_path);
+std.debug.warn("Standard library base uri: {}\n", .{self.std_uri});
} else {
-self.std_path = null;
+self.std_uri = null;
}
}
// TODO: Normalize URIs somehow, probably just lowercase
pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle {
if (self.handles.get(uri)) |entry| {
std.debug.warn("Document already open: {}, incrementing count\n", .{uri});
entry.value.count += 1;
std.debug.warn("New count: {}\n", .{entry.value.count});
self.allocator.free(uri);
return &entry.value;
}
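
In short, init now derives a file:// URI for the standard library root up front instead of keeping a raw filesystem path. A minimal sketch of that derivation, assuming it sits next to uri.zig and using a made-up lib path in the comments (not the committed code):

const std = @import("std");
const URI = @import("uri.zig");

fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: []const u8) ![]const u8 {
    // e.g. "/usr/local/zig/lib/zig" -> "/usr/local/zig/lib/zig/std/std.zig"
    const std_path = try std.fs.path.resolve(allocator, &[_][]const u8{
        zig_lib_path, "./std/std.zig",
    });
    defer allocator.free(std_path);
    // -> "file:///usr/local/zig/lib/zig/std/std.zig"
    return try URI.fromPath(allocator, std_path);
}

When no lib path is configured, std_uri stays null and onImport (below) refuses "std" imports.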
@@ -71,7 +77,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H
var handle = Handle{
.count = 1,
-.import_uris = &[_][]const u8 {},
+.import_uris = std.ArrayList([]const u8).init(self.allocator),
.document = .{
.uri = duped_uri,
.text = duped_text,
@@ -97,14 +103,12 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
self.allocator.free(sane);
}
-for (entry.value.import_uris) |import_uri| {
+for (entry.value.import_uris.items) |import_uri| {
self.decrementCount(import_uri);
self.allocator.free(import_uri);
}
-if (entry.value.import_uris.len > 0) {
-self.allocator.free(entry.value.import_uris);
-}
+entry.value.import_uris.deinit();
const uri_key = entry.key;
self.handles.removeAssertDiscard(uri);
@@ -129,14 +133,14 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
const dirty_tree = try handle.dirtyTree(self.allocator);
defer dirty_tree.deinit();
-if (dirty_tree.errors.len == 0) {
-std.debug.warn("New sane text for document {}\n", .{handle.uri()});
-if (handle.document.sane_text) |sane| {
-self.allocator.free(sane);
-}
+if (dirty_tree.errors.len > 0) return;
-handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text);
+std.debug.warn("New sane text for document {}\n", .{handle.uri()});
+if (handle.document.sane_text) |sane| {
+self.allocator.free(sane);
+}
+handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text);
}
pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void {
@@ -195,6 +199,118 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
try self.checkSanity(handle);
}
// @TODO: We only reduce the count upon closing,
// find a way to reduce it when removing imports.
// Perhaps on new sane text we can go through imports
// and remove those that are in the import_uris table
// but not in the file anymore.
pub const ImportContext = struct {
store: *DocumentStore,
handle: *Handle,
trees: std.ArrayList(*std.zig.ast.Tree),
pub fn onImport(self: *ImportContext, import_str: []const u8) !?*std.zig.ast.Node {
const allocator = self.store.allocator;
const final_uri = if (std.mem.eql(u8, import_str, "std"))
if (self.store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri)
else {
std.debug.warn("Cannot resolve std library import, path is null.\n", .{});
return null;
}
else b: {
// Find relative uri
const path = try URI.parse(allocator, self.handle.uri());
defer allocator.free(path);
const dir_path = std.fs.path.dirname(path) orelse "";
const import_path = try std.fs.path.resolve(allocator, &[_][]const u8 {
dir_path, import_str
});
break :b import_path;
};
// @TODO Clean up code, lots of repetition
{
errdefer allocator.free(final_uri);
// Check if we already imported this.
for (self.handle.import_uris.items) |uri| {
// If we did, set our new handle and return the parsed tree root node.
if (std.mem.eql(u8, uri, final_uri)) {
self.handle = self.store.getHandle(final_uri) orelse return null;
if (try self.handle.saneTree(allocator)) |tree| {
try self.trees.append(tree);
return &tree.root_node.base;
}
return null;
}
}
}
// New import.
// Add to import table of current handle.
try self.handle.import_uris.append(final_uri);
// Check if the import is already opened by others.
if (self.store.getHandle(final_uri)) |new_handle| {
// If it is, increment the count, set our new handle and return the parsed tree root node.
new_handle.count += 1;
self.handle = new_handle;
if (try self.handle.saneTree(allocator)) |tree| {
try self.trees.append(tree);
return &tree.root_node.base;
}
return null;
}
// New document, read the file then call into openDocument.
const file_path = try URI.parse(allocator, final_uri);
defer allocator.free(file_path);
var file = std.fs.cwd().openFile(file_path, .{}) catch {
std.debug.warn("Cannot open import file {}", .{file_path});
return null;
};
defer file.close();
const size = std.math.cast(usize, try file.getEndPos()) catch std.math.maxInt(usize);
// TODO: This is wasteful, we know we don't need to copy the text on this openDocument call
const file_contents = try allocator.alloc(u8, size);
defer allocator.free(file_contents);
file.inStream().readNoEof(file_contents) catch {
std.debug.warn("Could not read from file {}", .{file_path});
return null;
};
self.handle = try openDocument(self.store, final_uri, file_contents);
if (try self.handle.saneTree(allocator)) |tree| {
try self.trees.append(tree);
return &tree.root_node.base;
}
return null;
}
pub fn deinit(self: *ImportContext) void {
for (self.trees.items) |tree| {
tree.deinit();
}
self.trees.deinit();
}
};
pub fn importContext(self: *DocumentStore, handle: *Handle) ImportContext {
return .{
.store = self,
.handle = handle,
.trees = std.ArrayList(*std.zig.ast.Tree).init(self.allocator),
};
}
pub fn deinit(self: *DocumentStore) void {
// @TODO: Deinit everything!
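
For reference, the intended lifetime is one ImportContext per request: build it with importContext, let onImport retarget it from file to file, and deinit it once the results have been consumed, since deinit frees every tree that onImport parsed. A minimal usage sketch under those assumptions; the helper below is hypothetical and not part of the commit:

const std = @import("std");
const DocumentStore = @import("document_store.zig");

// Counts the top-level declarations of whatever "std" resolves to.
// `store` and `handle` are assumed to come from an earlier openDocument call.
fn countStdRootDecls(store: *DocumentStore, handle: *DocumentStore.Handle) !usize {
    var import_ctx = store.importContext(handle);
    // Nodes returned by onImport point into trees owned by the context,
    // so they must not be used after this deinit runs.
    defer import_ctx.deinit();

    const root = (try import_ctx.onImport("std")) orelse return 0;
    var count: usize = 0;
    var index: usize = 0;
    while (root.iterate(index)) |_| : (index += 1) count += 1;
    return count;
}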

View File

@@ -253,7 +253,7 @@ fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void {
});
}
-fn completeFieldAccess(id: i64, handle: DocumentStore.Handle, position: types.Position, config: Config) !void {
+fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, config: Config) !void {
const tree = (try handle.saneTree(allocator)) orelse {
return try send(types.Response{
.id = .{.Integer = id},
@@ -277,7 +277,9 @@ fn completeFieldAccess(id: i64, handle: DocumentStore.Handle, position: types.Po
var tokenizer = std.zig.Tokenizer.init(line);
// @TODO Pass import ctx.
-if (analysis.getFieldAccessTypeNode(tree, &tokenizer, {})) |node| {
+var import_ctx = document_store.importContext(handle);
+defer import_ctx.deinit();
+if (analysis.getFieldAccessTypeNode(tree, &tokenizer, &import_ctx)) |node| {
var index: usize = 0;
while (node.iterate(index)) |child_node| {
if (try nodeToCompletion(&arena.allocator, tree, child_node, config)) |completion| {
@@ -523,7 +525,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
} else if (pos_context == .var_access or pos_context == .empty) {
try completeGlobal(id, handle.*, config);
} else if (pos_context == .field_access) {
-try completeFieldAccess(id, handle.*, pos, config);
+try completeFieldAccess(id, handle, pos, config);
} else {
try respondGeneric(id, no_completions_response);
}
@@ -612,7 +614,8 @@ pub fn main() anyerror!void {
}
defer std.json.parseFree(Config, config, config_parse_options);
-document_store.init(allocator, config.zig_path);
+// @TODO Check is_absolute
+try document_store.init(allocator, config.zig_lib_path);
defer document_store.deinit();
// This JSON parser is passed to processJsonRpc and reset.

View File

@@ -3,9 +3,6 @@
const std = @import("std");
const json = std.json;
-// @TODO
-pub const ImportCtx = void;
// JSON Types
pub const String = []const u8;
@@ -138,7 +135,7 @@ pub const PublishDiagnosticsParams = struct {
};
pub const TextDocument = struct {
-uri: String,
+uri: DocumentUri,
// This is a substring of mem starting at 0
text: String,
// This holds the memory that we have actually allocated.

View File

@@ -1,7 +1,37 @@
const std = @import("std");
// Original code: https://github.com/andersfr/zig-lsp/blob/master/uri.zig
const reserved_chars = &[_]u8 {
'!', '#', '$', '%', '&', '\'',
'(', ')', '*', '+', ',', ':',
';', '=', '?', '@', '[', ']',
};
/// Returns a URI from a path, caller owns the memory allocated with `allocator`
pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 {
if (path.len == 0) return "";
const prefix = if (std.builtin.os.tag == .windows) "file:///" else "file://";
var buf = std.ArrayList(u8).init(allocator);
try buf.appendSlice(prefix);
var out_stream = buf.outStream();
for (path) |char| {
if (char == std.fs.path.sep) {
try buf.append('/');
} else if (std.mem.indexOfScalar(u8, reserved_chars, char) != null) {
// Write '%' + hex with uppercase
try buf.append('%');
try std.fmt.format(out_stream, "{X}", .{char});
} else {
try buf.append(std.ascii.toLower(char));
}
}
return buf.toOwnedSlice();
}
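
As a concrete illustration of what fromPath produces: reserved characters are percent-encoded with uppercase hex digits and everything else is lowercased. A hypothetical test, assuming a non-Windows host (so the prefix is file://) and a sibling file that imports uri.zig; it is not part of the commit:

const std = @import("std");
const URI = @import("uri.zig");

test "fromPath escapes reserved characters and lowercases the rest" {
    const uri = try URI.fromPath(std.testing.allocator, "/home/User/my:file.zig");
    defer std.testing.allocator.free(uri);
    // ':' is listed in reserved_chars, so it becomes %3A; 'U' is lowered to 'u'.
    std.debug.assert(std.mem.eql(u8, uri, "file:///home/user/my%3Afile.zig"));
}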
// Original code: https://github.com/andersfr/zig-lsp/blob/master/uri.zig
fn parseHex(c: u8) !u8 {
return switch(c) {
'0'...'9' => c - '0',