diff --git a/src/document_store.zig b/src/document_store.zig
index 80f0dfc..7d52f57 100644
--- a/src/document_store.zig
+++ b/src/document_store.zig
@@ -52,11 +52,6 @@ pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ?
 fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
     std.debug.warn("Opened document: {}\n", .{uri});
 
-    errdefer {
-        self.allocator.free(uri);
-        self.allocator.free(text);
-    }
-
     var handle = Handle{
         .count = 1,
         .import_uris = std.ArrayList([]const u8).init(self.allocator),
@@ -67,8 +62,8 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
         },
     };
     try self.checkSanity(&handle);
-    try self.handles.putNoClobber(uri, handle);
-    return &(self.handles.get(uri) orelse unreachable).value;
+    const kv = try self.handles.getOrPutValue(uri, handle);
+    return &kv.value;
 }
 
 pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle {
@@ -84,7 +79,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H
     const duped_uri = try std.mem.dupe(self.allocator, u8, uri);
     errdefer self.allocator.free(duped_uri);
 
-    return self.newDocument(duped_uri, duped_text);
+    return try self.newDocument(duped_uri, duped_text);
 }
 
 fn decrementCount(self: *DocumentStore, uri: []const u8) void {
@@ -145,7 +140,7 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
     }
 
     for (import_strs) |str| {
-        const uri = (try uriFromImportStr(self, handle, str)) orelse continue;
+        const uri = (try uriFromImportStr(self, handle.*, str)) orelse continue;
         defer self.allocator.free(uri);
 
         var idx: usize = 0;
@@ -175,7 +170,7 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
 }
 
 pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void {
-    var document = &handle.document;
+    const document = &handle.document;
 
     for (content_changes.items) |change| {
         if (change.Object.getValue("range")) |range| {
@@ -230,7 +225,7 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
     try self.checkSanity(handle);
 }
 
-fn uriFromImportStr(store: *DocumentStore, handle: *Handle, import_str: []const u8) !?[]const u8 {
+fn uriFromImportStr(store: *DocumentStore, handle: Handle, import_str: []const u8) !?[]const u8 {
     return if (std.mem.eql(u8, import_str, "std"))
         if (store.std_uri) |std_root_uri| try std.mem.dupe(store.allocator, u8, std_root_uri)
         else {
@@ -264,7 +259,7 @@ pub const AnalysisContext = struct {
 
     pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node {
         const allocator = self.store.allocator;
-        const final_uri = (try uriFromImportStr(self.store, self.handle, import_str)) orelse return null;
+        const final_uri = (try uriFromImportStr(self.store, self.handle.*, import_str)) orelse return null;
 
         std.debug.warn("Import final URI: {}\n", .{final_uri});
         var consumed_final_uri = false;
diff --git a/src/header.zig b/src/header.zig
new file mode 100644
index 0000000..3483e24
--- /dev/null
+++ b/src/header.zig
@@ -0,0 +1,44 @@
+const std = @import("std");
+const mem = std.mem;
+
+const RequestHeader = struct {
+    content_length: usize,
+
+    /// null implies "application/vscode-jsonrpc; charset=utf-8"
+    content_type: ?[]const u8,
+
+    pub fn deinit(self: @This(), allocator: *mem.Allocator) void {
+        if (self.content_type) |ct| allocator.free(ct);
+    }
+};
+
+pub fn readRequestHeader(allocator: *mem.Allocator, instream: var) !RequestHeader {
+    var r = RequestHeader{
+        .content_length = undefined,
+        .content_type = null,
+    };
+    errdefer r.deinit(allocator);
+
+    var has_content_length = false;
+    while (true) {
+        const header = try instream.readUntilDelimiterAlloc(allocator, '\n', 0x100);
+        defer allocator.free(header);
+        if (header.len == 0 or header[header.len - 1] != '\r') return error.MissingCarriageReturn;
+        if (header.len == 1) break;
+
+        const header_name = header[0..mem.indexOf(u8, header, ": ") orelse return error.MissingColon];
+        const header_value = header[header_name.len + 2..header.len-1];
+        if (mem.eql(u8, header_name, "Content-Length")) {
+            if (header_value.len == 0) return error.MissingHeaderValue;
+            r.content_length = std.fmt.parseInt(usize, header_value, 10) catch return error.InvalidContentLength;
+            has_content_length = true;
+        } else if (mem.eql(u8, header_name, "Content-Type")) {
+            r.content_type = try mem.dupe(allocator, u8, header_value);
+        } else {
+            return error.UnknownHeader;
+        }
+    }
+    if (!has_content_length) return error.MissingContentLength;
+
+    return r;
+}
diff --git a/src/main.zig b/src/main.zig
index 739b06a..a5d78fe 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -4,6 +4,7 @@ const build_options = @import("build_options");
 const Config = @import("config.zig");
 const DocumentStore = @import("document_store.zig");
 const DebugAllocator = @import("debug_allocator.zig");
+const readRequestHeader = @import("header.zig").readRequestHeader;
 const data = @import("data/" ++ build_options.data_version ++ ".zig");
 const types = @import("types.zig");
 const analysis = @import("analysis.zig");
@@ -306,7 +307,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
 
     var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);
 
-    var line = try handle.document.getLine(@intCast(usize, position.line));
+    const line = try handle.document.getLine(@intCast(usize, position.line));
     var tokenizer = std.zig.Tokenizer.init(line[line_start_idx..]);
 
     // var decls = try analysis.declsFromIndex(&arena.allocator, analysis_ctx.tree, try handle.document.positionToIndex(position));
@@ -620,9 +621,10 @@ pub fn main() anyerror!void {
     const stdin = std.io.getStdIn().inStream();
     stdout = std.io.getStdOut().outStream();
 
-    // Read he configuration, if any.
-    var config = Config{};
+    // Read the configuration, if any.
     const config_parse_options = std.json.ParseOptions{ .allocator = allocator };
+    var config = Config{};
+    defer std.json.parseFree(Config, config, config_parse_options);
 
     // TODO: Investigate using std.fs.Watch to detect writes to the config and reload it.
     config_read: {
@@ -632,30 +634,26 @@ pub fn main() anyerror!void {
         var exec_dir = std.fs.cwd().openDir(exec_dir_path, .{}) catch break :config_read;
         defer exec_dir.close();
 
-        var conf_file = exec_dir.openFile("zls.json", .{}) catch break :config_read;
+        const conf_file = exec_dir.openFile("zls.json", .{}) catch break :config_read;
         defer conf_file.close();
 
-        const conf_file_stat = conf_file.stat() catch break :config_read;
-
-        // Allocate enough memory for the whole file.
-        var file_buf = try allocator.alloc(u8, conf_file_stat.size);
+        // Max 1MB
+        const file_buf = conf_file.inStream().readAllAlloc(allocator, 0x1000000) catch break :config_read;
         defer allocator.free(file_buf);
 
-        const bytes_read = conf_file.readAll(file_buf) catch break :config_read;
-        if (bytes_read != conf_file_stat.size) break :config_read;
-
         // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
         config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), config_parse_options) catch |err| {
             std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
             break :config_read;
         };
     }
-    defer std.json.parseFree(Config, config, config_parse_options);
 
-    if (config.zig_lib_path != null and !std.fs.path.isAbsolute(config.zig_lib_path.?)) {
-        std.debug.warn("zig library path is not absolute, defaulting to null.\n", .{});
-        allocator.free(config.zig_lib_path.?);
-        config.zig_lib_path = null;
+    if (config.zig_lib_path) |zig_lib_path| {
+        if (!std.fs.path.isAbsolute(zig_lib_path)) {
+            std.debug.warn("zig library path is not absolute, defaulting to null.\n", .{});
+            allocator.free(zig_lib_path);
+            config.zig_lib_path = null;
+        }
     }
 
     try document_store.init(allocator, config.zig_lib_path);
@@ -665,73 +663,17 @@ pub fn main() anyerror!void {
     var json_parser = std.json.Parser.init(allocator, false);
     defer json_parser.deinit();
 
-    var offset: usize = 0;
-    var bytes_read: usize = 0;
-
-    var index: usize = 0;
-    var content_len: usize = 0;
-
-    stdin_poll: while (true) {
-        if (offset >= 16 and std.mem.startsWith(u8, buffer.items, "Content-Length: ")) {
-            index = 16;
-            while (index <= offset + 10) : (index += 1) {
-                const c = buffer.items[index];
-                if (c >= '0' and c <= '9') {
-                    content_len = content_len * 10 + (c - '0');
-                } else if (c == '\r' and buffer.items[index + 1] == '\n') {
-                    index += 2;
-                    break;
-                }
-            }
-
-            if (buffer.items[index] == '\r') {
-                index += 2;
-                if (buffer.items.len < index + content_len) {
-                    try buffer.resize(index + content_len);
-                }
-
-                body_poll: while (offset < content_len + index) {
-                    bytes_read = try stdin.readAll(buffer.items[offset .. index + content_len]);
-                    if (bytes_read == 0) {
-                        try log("0 bytes read; exiting!", .{});
-                        return;
-                    }
-
-                    offset += bytes_read;
-                }
-
-                try processJsonRpc(&json_parser, buffer.items[index .. index + content_len], config);
-                json_parser.reset();
-
-                offset = 0;
-                content_len = 0;
-            } else {
-                try log("\\r not found", .{});
-            }
-        } else if (offset >= 16) {
-            try log("Offset is greater than 16!", .{});
+    while (true) {
+        const headers = readRequestHeader(allocator, stdin) catch |err| {
+            try log("{}; exiting!", .{@errorName(err)});
             return;
-        }
-
-        if (offset < 16) {
-            bytes_read = try stdin.readAll(buffer.items[offset..25]);
-        } else {
-            if (offset == buffer.items.len) {
-                try buffer.resize(buffer.items.len * 2);
-            }
-            if (index + content_len > buffer.items.len) {
-                bytes_read = try stdin.readAll(buffer.items[offset..buffer.items.len]);
-            } else {
-                bytes_read = try stdin.readAll(buffer.items[offset .. index + content_len]);
-            }
-        }
-
-        if (bytes_read == 0) {
-            try log("0 bytes read; exiting!", .{});
-            return;
-        }
-
-        offset += bytes_read;
+        };
+        defer headers.deinit(allocator);
+        const buf = try allocator.alloc(u8, headers.content_length);
+        defer allocator.free(buf);
+        try stdin.readNoEof(buf);
+        try processJsonRpc(&json_parser, buf, config);
+        json_parser.reset();
 
         if (debug_alloc) |dbg| {
             try log("{}", .{dbg.info});
diff --git a/src/types.zig b/src/types.zig
index 80094ac..97c1f2f 100644
--- a/src/types.zig
+++ b/src/types.zig
@@ -185,11 +185,11 @@ pub const MarkupKind = enum(u1) {
         options: json.StringifyOptions,
         out_stream: var,
     ) !void {
-        if (@enumToInt(value) == 0) {
-            try json.stringify("plaintext", options, out_stream);
-        } else {
-            try json.stringify("markdown", options, out_stream);
-        }
+        const str = switch (value) {
+            .PlainText => "plaintext",
+            .Markdown => "markdown",
+        };
+        try json.stringify(str, options, out_stream);
     }
 };
 
diff --git a/src/uri.zig b/src/uri.zig
index a331cb3..ad9d4fb 100644
--- a/src/uri.zig
+++ b/src/uri.zig
@@ -14,7 +14,7 @@ pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 {
     var buf = std.ArrayList(u8).init(allocator);
     try buf.appendSlice(prefix);
 
-    var out_stream = buf.outStream();
+    const out_stream = buf.outStream();
 
     for (path) |char| {
         if (char == std.fs.path.sep) {
@@ -55,17 +55,16 @@ fn parseHex(c: u8) !u8 {
 pub fn parse(allocator: *std.mem.Allocator, str: []const u8) ![]u8 {
     if (str.len < 7 or !std.mem.eql(u8, "file://", str[0..7])) return error.UriBadScheme;
 
-    var uri = try allocator.alloc(u8, str.len - (if (std.fs.path.sep == '\\') 8 else 7));
+    const uri = try allocator.alloc(u8, str.len - (if (std.fs.path.sep == '\\') 8 else 7));
     errdefer allocator.free(uri);
 
     const path = if (std.fs.path.sep == '\\') str[8..] else str[7..];
 
     var i: usize = 0;
    var j: usize = 0;
-    var e: usize = path.len;
-    while (j < e) : (i += 1) {
+    while (j < path.len) : (i += 1) {
        if (path[j] == '%') {
-            if (j + 2 >= e) return error.UriBadEscape;
+            if (j + 2 >= path.len) return error.UriBadEscape;
             const upper = try parseHex(path[j + 1]);
             const lower = try parseHex(path[j + 2]);
             uri[i] = (upper << 4) + lower;
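
Reviewer note, not part of the patch: below is a quick sketch of what the new header parser in src/header.zig accepts and returns, driven through an in-memory stream instead of stdin. It assumes the Zig 0.6-era std APIs already used elsewhere in this diff (inStream(), std.io.fixedBufferStream, std.testing.allocator); the test name and the sample header values are made up for illustration, and the file would need to sit next to src/header.zig for the relative import to resolve.

const std = @import("std");
const readRequestHeader = @import("header.zig").readRequestHeader;

test "readRequestHeader parses an LSP-style header block" {
    const allocator = std.testing.allocator;

    // Header fields are "Name: value\r\n" pairs terminated by an empty "\r\n" line;
    // the JSON-RPC body (not needed for this sketch) would follow that blank line.
    const bytes: []const u8 = "Content-Length: 18\r\n" ++
        "Content-Type: application/vscode-jsonrpc; charset=utf-8\r\n" ++
        "\r\n";
    var fbs = std.io.fixedBufferStream(bytes);

    const header = try readRequestHeader(allocator, fbs.inStream());
    defer header.deinit(allocator);

    // Content-Length is mandatory; Content-Type is optional and heap-duplicated.
    std.testing.expectEqual(@as(usize, 18), header.content_length);
    std.testing.expect(header.content_type != null);
}

Anything malformed surfaces as a named error (error.MissingContentLength, error.UnknownHeader, error.MissingCarriageReturn, ...), which is why the rewritten loop in main.zig simply logs @errorName(err) and exits instead of re-scanning the buffer by hand.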