Merge pull request #47 from daurnimator/master

Cleanups and rewrite of reading

commit add13df816
src/document_store.zig

@@ -52,11 +52,6 @@ pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ?
 fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
     std.debug.warn("Opened document: {}\n", .{uri});
 
-    errdefer {
-        self.allocator.free(uri);
-        self.allocator.free(text);
-    }
-
     var handle = Handle{
         .count = 1,
         .import_uris = std.ArrayList([]const u8).init(self.allocator),
@@ -67,8 +62,8 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
         },
     };
     try self.checkSanity(&handle);
-    try self.handles.putNoClobber(uri, handle);
-    return &(self.handles.get(uri) orelse unreachable).value;
+    const kv = try self.handles.getOrPutValue(uri, handle);
+    return &kv.value;
 }
 
 pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle {
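The `getOrPutValue` change collapses the old insert-then-lookup pair into a single hash-map operation that returns the stored entry directly. A minimal sketch of the pattern, assuming the 0.6-era `std.StringHashMap` API the diff itself uses, where `getOrPutValue` returns a pointer to the key-value entry (`putAndBorrow` is a hypothetical name for illustration):

```zig
const std = @import("std");

// Insert `value` if `key` is absent and hand back a pointer into the map,
// touching the hash table once instead of twice (putNoClobber + get).
fn putAndBorrow(map: *std.StringHashMap(u32), key: []const u8, value: u32) !*u32 {
    const kv = try map.getOrPutValue(key, value);
    return &kv.value;
}

test "single-lookup insert" {
    var map = std.StringHashMap(u32).init(std.testing.allocator);
    defer map.deinit();
    const stored = try putAndBorrow(&map, "count", 1);
    std.testing.expectEqual(@as(u32, 1), stored.*);
}
```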
@@ -84,7 +79,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H
     const duped_uri = try std.mem.dupe(self.allocator, u8, uri);
     errdefer self.allocator.free(duped_uri);
 
-    return self.newDocument(duped_uri, duped_text);
+    return try self.newDocument(duped_uri, duped_text);
 }
 
 fn decrementCount(self: *DocumentStore, uri: []const u8) void {
@@ -145,7 +140,7 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
     }
 
     for (import_strs) |str| {
-        const uri = (try uriFromImportStr(self, handle, str)) orelse continue;
+        const uri = (try uriFromImportStr(self, handle.*, str)) orelse continue;
         defer self.allocator.free(uri);
 
         var idx: usize = 0;
@@ -175,7 +170,7 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
 }
 
 pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void {
-    var document = &handle.document;
+    const document = &handle.document;
 
     for (content_changes.items) |change| {
         if (change.Object.getValue("range")) |range| {
@@ -230,7 +225,7 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
     try self.checkSanity(handle);
 }
 
-fn uriFromImportStr(store: *DocumentStore, handle: *Handle, import_str: []const u8) !?[]const u8 {
+fn uriFromImportStr(store: *DocumentStore, handle: Handle, import_str: []const u8) !?[]const u8 {
     return if (std.mem.eql(u8, import_str, "std"))
         if (store.std_uri) |std_root_uri| try std.mem.dupe(store.allocator, u8, std_root_uri)
         else {
@@ -264,7 +259,7 @@ pub const AnalysisContext = struct {
 
     pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node {
         const allocator = self.store.allocator;
-        const final_uri = (try uriFromImportStr(self.store, self.handle, import_str)) orelse return null;
+        const final_uri = (try uriFromImportStr(self.store, self.handle.*, import_str)) orelse return null;
 
         std.debug.warn("Import final URI: {}\n", .{final_uri});
         var consumed_final_uri = false;
src/header.zig (new file, 44 lines)

@@ -0,0 +1,44 @@
+const std = @import("std");
+const mem = std.mem;
+
+const RequestHeader = struct {
+    content_length: usize,
+
+    /// null implies "application/vscode-jsonrpc; charset=utf-8"
+    content_type: ?[]const u8,
+
+    pub fn deinit(self: @This(), allocator: *mem.Allocator) void {
+        if (self.content_type) |ct| allocator.free(ct);
+    }
+};
+
+pub fn readRequestHeader(allocator: *mem.Allocator, instream: var) !RequestHeader {
+    var r = RequestHeader{
+        .content_length = undefined,
+        .content_type = null,
+    };
+    errdefer r.deinit(allocator);
+
+    var has_content_length = false;
+    while (true) {
+        const header = try instream.readUntilDelimiterAlloc(allocator, '\n', 0x100);
+        defer allocator.free(header);
+        if (header.len == 0 or header[header.len - 1] != '\r') return error.MissingCarriageReturn;
+        if (header.len == 1) break;
+
+        const header_name = header[0..mem.indexOf(u8, header, ": ") orelse return error.MissingColon];
+        const header_value = header[header_name.len + 2..header.len-1];
+        if (mem.eql(u8, header_name, "Content-Length")) {
+            if (header_value.len == 0) return error.MissingHeaderValue;
+            r.content_length = std.fmt.parseInt(usize, header_value, 10) catch return error.InvalidContentLength;
+            has_content_length = true;
+        } else if (mem.eql(u8, header_name, "Content-Type")) {
+            r.content_type = try mem.dupe(allocator, u8, header_value);
+        } else {
+            return error.UnknownHeader;
+        }
+    }
+    if (!has_content_length) return error.MissingContentLength;
+
+    return r;
+}
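The new reader can be exercised against an in-memory stream; a minimal sketch, assuming the Zig 0.6-era `std.io.fixedBufferStream` and test allocator (the test itself is hypothetical, not part of the commit):

```zig
const std = @import("std");
const readRequestHeader = @import("header.zig").readRequestHeader;

test "reads LSP-style framing headers" {
    // Headers are CRLF-terminated; a lone "\r\n" ends the header section.
    var fbs = std.io.fixedBufferStream("Content-Length: 18\r\n\r\n");
    const header = try readRequestHeader(std.testing.allocator, fbs.inStream());
    defer header.deinit(std.testing.allocator);
    std.testing.expectEqual(@as(usize, 18), header.content_length);
}
```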
src/main.zig

@@ -4,6 +4,7 @@ const build_options = @import("build_options");
 const Config = @import("config.zig");
 const DocumentStore = @import("document_store.zig");
 const DebugAllocator = @import("debug_allocator.zig");
+const readRequestHeader = @import("header.zig").readRequestHeader;
 const data = @import("data/" ++ build_options.data_version ++ ".zig");
 const types = @import("types.zig");
 const analysis = @import("analysis.zig");
@@ -306,7 +307,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
 
     var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);
 
-    var line = try handle.document.getLine(@intCast(usize, position.line));
+    const line = try handle.document.getLine(@intCast(usize, position.line));
     var tokenizer = std.zig.Tokenizer.init(line[line_start_idx..]);
 
     // var decls = try analysis.declsFromIndex(&arena.allocator, analysis_ctx.tree, try handle.document.positionToIndex(position));
@@ -620,9 +621,10 @@ pub fn main() anyerror!void {
     const stdin = std.io.getStdIn().inStream();
     stdout = std.io.getStdOut().outStream();
 
-    // Read he configuration, if any.
-    var config = Config{};
+    // Read the configuration, if any.
     const config_parse_options = std.json.ParseOptions{ .allocator = allocator };
+    var config = Config{};
+    defer std.json.parseFree(Config, config, config_parse_options);
 
     // TODO: Investigate using std.fs.Watch to detect writes to the config and reload it.
     config_read: {
@@ -632,31 +634,27 @@ pub fn main() anyerror!void {
         var exec_dir = std.fs.cwd().openDir(exec_dir_path, .{}) catch break :config_read;
         defer exec_dir.close();
 
-        var conf_file = exec_dir.openFile("zls.json", .{}) catch break :config_read;
+        const conf_file = exec_dir.openFile("zls.json", .{}) catch break :config_read;
         defer conf_file.close();
 
-        const conf_file_stat = conf_file.stat() catch break :config_read;
-
-        // Allocate enough memory for the whole file.
-        var file_buf = try allocator.alloc(u8, conf_file_stat.size);
+        // Max 1MB
+        const file_buf = conf_file.inStream().readAllAlloc(allocator, 0x1000000) catch break :config_read;
         defer allocator.free(file_buf);
 
-        const bytes_read = conf_file.readAll(file_buf) catch break :config_read;
-        if (bytes_read != conf_file_stat.size) break :config_read;
-
         // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
         config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), config_parse_options) catch |err| {
             std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
             break :config_read;
         };
     }
-    defer std.json.parseFree(Config, config, config_parse_options);
 
-    if (config.zig_lib_path != null and !std.fs.path.isAbsolute(config.zig_lib_path.?)) {
-        std.debug.warn("zig library path is not absolute, defaulting to null.\n", .{});
-        allocator.free(config.zig_lib_path.?);
-        config.zig_lib_path = null;
+    if (config.zig_lib_path) |zig_lib_path| {
+        if (!std.fs.path.isAbsolute(zig_lib_path)) {
+            std.debug.warn("zig library path is not absolute, defaulting to null.\n", .{});
+            allocator.free(zig_lib_path);
+            config.zig_lib_path = null;
+        }
     }
 
     try document_store.init(allocator, config.zig_lib_path);
     defer document_store.deinit();
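The `readAllAlloc` call replaces the stat-then-read dance, which removes the window between `stat()` and `readAll()` where the file could change, and caps the allocation at 1 MB. A sketch of the capping behavior, assuming the Zig 0.6-era `InStream.readAllAlloc` rejects oversized streams with `error.StreamTooLong`:

```zig
const std = @import("std");

test "readAllAlloc rejects streams larger than the cap" {
    var fbs = std.io.fixedBufferStream("0123456789");
    // A 4-byte cap on a 10-byte stream: the read errors instead of truncating.
    std.testing.expectError(error.StreamTooLong, fbs.inStream().readAllAlloc(std.testing.allocator, 4));
}
```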
@@ -665,74 +663,18 @@ pub fn main() anyerror!void {
     var json_parser = std.json.Parser.init(allocator, false);
     defer json_parser.deinit();
 
-    var offset: usize = 0;
-    var bytes_read: usize = 0;
-
-    var index: usize = 0;
-    var content_len: usize = 0;
-
-    stdin_poll: while (true) {
-        if (offset >= 16 and std.mem.startsWith(u8, buffer.items, "Content-Length: ")) {
-            index = 16;
-            while (index <= offset + 10) : (index += 1) {
-                const c = buffer.items[index];
-                if (c >= '0' and c <= '9') {
-                    content_len = content_len * 10 + (c - '0');
-                } else if (c == '\r' and buffer.items[index + 1] == '\n') {
-                    index += 2;
-                    break;
-                }
-            }
-
-            if (buffer.items[index] == '\r') {
-                index += 2;
-                if (buffer.items.len < index + content_len) {
-                    try buffer.resize(index + content_len);
-                }
-
-                body_poll: while (offset < content_len + index) {
-                    bytes_read = try stdin.readAll(buffer.items[offset .. index + content_len]);
-                    if (bytes_read == 0) {
-                        try log("0 bytes read; exiting!", .{});
+    while (true) {
+        const headers = readRequestHeader(allocator, stdin) catch |err| {
+            try log("{}; exiting!", .{@errorName(err)});
             return;
-                    }
-
-                    offset += bytes_read;
-                }
-
-                try processJsonRpc(&json_parser, buffer.items[index .. index + content_len], config);
+        };
+        defer headers.deinit(allocator);
+        const buf = try allocator.alloc(u8, headers.content_length);
+        defer allocator.free(buf);
+        try stdin.readNoEof(buf);
+        try processJsonRpc(&json_parser, buf, config);
         json_parser.reset();
 
-                offset = 0;
-                content_len = 0;
-            } else {
-                try log("\\r not found", .{});
-            }
-        } else if (offset >= 16) {
-            try log("Offset is greater than 16!", .{});
-            return;
-        }
-
-        if (offset < 16) {
-            bytes_read = try stdin.readAll(buffer.items[offset..25]);
-        } else {
-            if (offset == buffer.items.len) {
-                try buffer.resize(buffer.items.len * 2);
-            }
-            if (index + content_len > buffer.items.len) {
-                bytes_read = try stdin.readAll(buffer.items[offset..buffer.items.len]);
-            } else {
-                bytes_read = try stdin.readAll(buffer.items[offset .. index + content_len]);
-            }
-        }
-
-        if (bytes_read == 0) {
-            try log("0 bytes read; exiting!", .{});
-            return;
-        }
-
-        offset += bytes_read;
-
         if (debug_alloc) |dbg| {
             try log("{}", .{dbg.info});
         }
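The rewritten loop consumes one framed message per iteration: parse the header section, allocate exactly `content_length` bytes, fill them with `readNoEof`, and hand the body to the JSON-RPC dispatcher. A sketch of the framing it expects on stdin (the `frame` helper is hypothetical, for illustration only):

```zig
const std = @import("std");

// Build a wire message: headers, a blank line, then exactly body.len bytes.
fn frame(allocator: *std.mem.Allocator, body: []const u8) ![]u8 {
    return std.fmt.allocPrint(allocator, "Content-Length: {}\r\n\r\n{}", .{ body.len, body });
}

test "framing matches what the read loop consumes" {
    const msg = try frame(std.testing.allocator, "{\"jsonrpc\":\"2.0\"}");
    defer std.testing.allocator.free(msg);
    std.testing.expect(std.mem.startsWith(u8, msg, "Content-Length: 17\r\n\r\n"));
}
```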
src/types.zig

@@ -185,11 +185,11 @@ pub const MarkupKind = enum(u1) {
         options: json.StringifyOptions,
         out_stream: var,
     ) !void {
-        if (@enumToInt(value) == 0) {
-            try json.stringify("plaintext", options, out_stream);
-        } else {
-            try json.stringify("markdown", options, out_stream);
-        }
+        const str = switch (value) {
+            .PlainText => "plaintext",
+            .Markdown => "markdown",
+        };
+        try json.stringify(str, options, out_stream);
     }
 };
 
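The switch-based serialization is exhaustive, so adding a third `MarkupKind` variant becomes a compile error instead of silently falling into the old `else` branch. The pattern in isolation (standalone sketch; the enum is redeclared here just for the example):

```zig
const std = @import("std");

const Kind = enum(u1) { PlainText, Markdown };

// Exhaustive mapping: the compiler forces a new arm for every new variant.
fn kindString(value: Kind) []const u8 {
    return switch (value) {
        .PlainText => "plaintext",
        .Markdown => "markdown",
    };
}

test "every variant has a spelling" {
    std.testing.expect(std.mem.eql(u8, kindString(.PlainText), "plaintext"));
    std.testing.expect(std.mem.eql(u8, kindString(.Markdown), "markdown"));
}
```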
src/uri.zig

@@ -14,7 +14,7 @@ pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 {
     var buf = std.ArrayList(u8).init(allocator);
     try buf.appendSlice(prefix);
 
-    var out_stream = buf.outStream();
+    const out_stream = buf.outStream();
 
     for (path) |char| {
         if (char == std.fs.path.sep) {
@@ -55,17 +55,16 @@ fn parseHex(c: u8) !u8 {
 pub fn parse(allocator: *std.mem.Allocator, str: []const u8) ![]u8 {
     if (str.len < 7 or !std.mem.eql(u8, "file://", str[0..7])) return error.UriBadScheme;
 
-    var uri = try allocator.alloc(u8, str.len - (if (std.fs.path.sep == '\\') 8 else 7));
+    const uri = try allocator.alloc(u8, str.len - (if (std.fs.path.sep == '\\') 8 else 7));
     errdefer allocator.free(uri);
 
     const path = if (std.fs.path.sep == '\\') str[8..] else str[7..];
 
     var i: usize = 0;
     var j: usize = 0;
-    var e: usize = path.len;
-    while (j < e) : (i += 1) {
+    while (j < path.len) : (i += 1) {
         if (path[j] == '%') {
-            if (j + 2 >= e) return error.UriBadEscape;
+            if (j + 2 >= path.len) return error.UriBadEscape;
             const upper = try parseHex(path[j + 1]);
             const lower = try parseHex(path[j + 2]);
             uri[i] = (upper << 4) + lower;
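Dropping `e` leaves `path.len` as the single source of truth for both the loop bound and the escape bounds check. As a worked example of the decode arithmetic above, "%20" yields (2 << 4) + 0 == 0x20, a space; a standalone sketch of the hex-pair decode (`hexVal` is a local stand-in for the file's `parseHex`, whose body is not shown in this diff):

```zig
const std = @import("std");

// Stand-in for parseHex: one hex digit to its 4-bit value.
fn hexVal(c: u8) !u8 {
    return switch (c) {
        '0'...'9' => c - '0',
        'a'...'f' => c - 'a' + 10,
        'A'...'F' => c - 'A' + 10,
        else => error.UriBadHexChar,
    };
}

test "%20 decodes to a space" {
    const byte = ((try hexVal('2')) << 4) + (try hexVal('0'));
    std.testing.expectEqual(@as(u8, ' '), byte);
}
```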