Use std.log

Alexandros Naskos 2020-06-26 14:29:59 +03:00
parent b3f6143e08
commit 4952c34481
5 changed files with 110 additions and 55 deletions
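
The diff below converts zls's ad-hoc `std.debug.print` calls to the scoped `std.log` API and installs a custom log handler in main.zig. For context: with the std.log API of Zig master at the time (scope passed as the first argument; later releases moved this to `std.log.scoped`), every `std.log.<level>(scope, fmt, args)` call is filtered against a public `log_level` constant and dispatched to a `log` function declared in the root source file, if one exists. Below is a minimal, self-contained sketch of that mechanism using the same signatures this commit relies on; the scope names and messages are illustrative, not zls code.

    const std = @import("std");

    // Compile-time filter: messages below this level are skipped entirely.
    pub const log_level: std.log.Level = switch (std.builtin.mode) {
        .Debug => .debug,
        else => .notice,
    };

    // A public `log` in the root file overrides the default handler;
    // every std.log call in the program is routed through it.
    pub fn log(
        comptime message_level: std.log.Level,
        comptime scope: @Type(.EnumLiteral),
        comptime format: []const u8,
        args: var,
    ) void {
        std.debug.print("[{}-{}] " ++ format, .{ @tagName(message_level), @tagName(scope) } ++ args);
    }

    pub fn main() void {
        // Scoped calls in the style used throughout this commit:
        std.log.debug(.analysis, "resolving import {}\n", .{"std"});
        std.log.notice(.main, "zls initialized", .{});
    }

zls's actual override (added in the main.zig hunk below) goes one step further: instead of writing to stderr, it forwards the formatted message to the LSP client as a window/showMessage or window/logMessage notification.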

View File

@@ -801,7 +801,7 @@ pub fn resolveTypeOfNodeInternal(
 const import_str = handle.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
 const new_handle = (store.resolveImport(handle, import_str[1 .. import_str.len - 1]) catch |err| block: {
-std.debug.print("Error {} while processing import {}\n", .{ err, import_str });
+std.log.debug(.analysis, "Error {} while processing import {}\n", .{ err, import_str });
 return null;
 }) orelse return null;
@@ -836,7 +836,7 @@ pub fn resolveTypeOfNodeInternal(
 .type = .{ .data = .{ .other = node }, .is_type_val = false },
 .handle = handle,
 },
-else => {}, //std.debug.print("Type resolution case not implemented; {}\n", .{node.id}),
+else => {}, //std.log.debug(.analysis, "Type resolution case not implemented; {}\n", .{node.id}),
 }
 return null;
 }
@@ -1031,7 +1031,7 @@ pub fn getFieldAccessType(
 current_type = (try resolveUnwrapOptionalType(store, arena, current_type, &bound_type_params)) orelse return null;
 },
 else => {
-std.debug.print("Unrecognized token {} after period.\n", .{after_period.id});
+std.log.debug(.analysis, "Unrecognized token {} after period.\n", .{after_period.id});
 return null;
 },
 }
@@ -1082,7 +1082,7 @@ pub fn getFieldAccessType(
 current_type = (try resolveBracketAccessType(store, arena, current_type, if (is_range) .Range else .Single, &bound_type_params)) orelse return null;
 },
 else => {
-std.debug.print("Unimplemented token: {}\n", .{tok.id});
+std.log.debug(.analysis, "Unimplemented token: {}\n", .{tok.id});
 return null;
 },
 }
@@ -1126,7 +1126,7 @@ pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 {
 }
 },
 else => {
-std.debug.print("INVALID: {}\n", .{node.id});
+std.log.debug(.analysis, "INVALID: {}\n", .{node.id});
 },
 }
@@ -1349,7 +1349,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no
 };
 if (getDeclName(tree, node) == null) {
-std.debug.print("NULL NAME: {}\n", .{node.id});
+std.log.debug(.analysis, "NULL NAME: {}\n", .{node.id});
 }
 const maybe_name = if (getDeclName(tree, node)) |name|
@@ -1848,7 +1848,7 @@ pub const DocumentScope = struct {
 pub fn debugPrint(self: DocumentScope) void {
 for (self.scopes) |scope| {
-std.debug.print(
+std.log.debug(.analysis,
 \\--------------------------
 \\Scope {}, range: [{}, {})
 \\ {} usingnamespaces
@@ -1863,10 +1863,10 @@ pub const DocumentScope = struct {
 var decl_it = scope.decls.iterator();
 var idx: usize = 0;
 while (decl_it.next()) |name_decl| : (idx += 1) {
-if (idx != 0) std.debug.print(", ", .{});
-std.debug.print("{}", .{name_decl.key});
+if (idx != 0) std.log.debug(.analysis, ", ", .{});
+std.log.debug(.analysis, "{}", .{name_decl.key});
 }
-std.debug.print("\n--------------------------\n", .{});
+std.log.debug(.analysis, "\n--------------------------\n", .{});
 }
 }

View File

@@ -152,7 +152,7 @@ fn loadPackages(context: LoadPackagesContext) !void {
 switch (zig_run_result.term) {
 .Exited => |exit_code| {
 if (exit_code == 0) {
-std.debug.print("Finished zig run for build file {}\n", .{build_file.uri});
+std.log.debug(.doc_store, "Finished zig run for build file {}\n", .{build_file.uri});
 for (build_file.packages.items) |old_pkg| {
 allocator.free(old_pkg.name);
@@ -190,7 +190,7 @@ fn loadPackages(context: LoadPackagesContext) !void {
 /// This function asserts the document is not open yet and takes ownership
 /// of the uri and text passed in.
 fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Handle {
-std.debug.print("Opened document: {}\n", .{uri});
+std.log.debug(.doc_store, "Opened document: {}\n", .{uri});
 var handle = try self.allocator.create(Handle);
 errdefer self.allocator.destroy(handle);
@@ -218,7 +218,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
 // TODO: Better logic for detecting std or subdirectories?
 const in_std = std.mem.indexOf(u8, uri, "/std/") != null;
 if (self.zig_exe_path != null and std.mem.endsWith(u8, uri, "/build.zig") and !in_std) {
-std.debug.print("Document is a build file, extracting packages...\n", .{});
+std.log.debug(.doc_store, "Document is a build file, extracting packages...\n", .{});
 // This is a build file.
 var build_file = try self.allocator.create(BuildFile);
 errdefer self.allocator.destroy(build_file);
@@ -240,7 +240,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
 .build_runner_path = self.build_runner_path,
 .zig_exe_path = self.zig_exe_path.?,
 }) catch |err| {
-std.debug.print("Failed to load packages of build file {} (error: {})\n", .{ build_file.uri, err });
+std.log.debug(.doc_store, "Failed to load packages of build file {} (error: {})\n", .{ build_file.uri, err });
 };
 } else if (self.zig_exe_path != null and !in_std) associate_build_file: {
 // Look into build files to see if we already have one that fits
@@ -248,7 +248,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
 const build_file_base_uri = build_file.uri[0 .. std.mem.lastIndexOfScalar(u8, build_file.uri, '/').? + 1];
 if (std.mem.startsWith(u8, uri, build_file_base_uri)) {
-std.debug.print("Found an associated build file: {}\n", .{build_file.uri});
+std.log.debug(.doc_store, "Found an associated build file: {}\n", .{build_file.uri});
 build_file.refs += 1;
 handle.associated_build_file = build_file;
 break :associate_build_file;
@@ -292,12 +292,12 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
 pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle {
 if (self.handles.get(uri)) |entry| {
-std.debug.print("Document already open: {}, incrementing count\n", .{uri});
+std.log.debug(.doc_store, "Document already open: {}, incrementing count\n", .{uri});
 entry.value.count += 1;
 if (entry.value.is_build_file) |build_file| {
 build_file.refs += 1;
 }
-std.debug.print("New count: {}\n", .{entry.value.count});
+std.log.debug(.doc_store, "New count: {}\n", .{entry.value.count});
 return entry.value;
 }
@@ -312,7 +312,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H
 fn decrementBuildFileRefs(self: *DocumentStore, build_file: *BuildFile) void {
 build_file.refs -= 1;
 if (build_file.refs == 0) {
-std.debug.print("Freeing build file {}\n", .{build_file.uri});
+std.log.debug(.doc_store, "Freeing build file {}\n", .{build_file.uri});
 for (build_file.packages.items) |pkg| {
 self.allocator.free(pkg.name);
 self.allocator.free(pkg.uri);
@@ -338,7 +338,7 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
 if (entry.value.count > 0)
 return;
-std.debug.print("Freeing document: {}\n", .{uri});
+std.log.debug(.doc_store, "Freeing document: {}\n", .{uri});
 if (entry.value.associated_build_file) |build_file| {
 self.decrementBuildFileRefs(build_file);
@@ -378,7 +378,7 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
 // Check if the document text is now sane, move it to sane_text if so.
 fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const u8) !void {
-std.debug.print("New text for document {}\n", .{handle.uri()});
+std.log.debug(.doc_store, "New text for document {}\n", .{handle.uri()});
 handle.tree.deinit();
 handle.tree = try std.zig.parse(self.allocator, handle.document.text);
@@ -424,7 +424,7 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const
 while (idx < still_exist.len) : (idx += 1) {
 if (still_exist[idx]) continue;
-std.debug.print("Import removed: {}\n", .{handle.import_uris.items[idx - offset]});
+std.log.debug(.doc_store, "Import removed: {}\n", .{handle.import_uris.items[idx - offset]});
 const uri = handle.import_uris.orderedRemove(idx - offset);
 offset += 1;
@@ -441,7 +441,7 @@ pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
 .build_runner_path = self.build_runner_path,
 .zig_exe_path = self.zig_exe_path.?,
 }) catch |err| {
-std.debug.print("Failed to load packages of build file {} (error: {})\n", .{ build_file.uri, err });
+std.log.debug(.doc_store, "Failed to load packages of build file {} (error: {})\n", .{ build_file.uri, err });
 };
 }
 }
@@ -515,7 +515,7 @@ pub fn uriFromImportStr(
 ) !?[]const u8 {
 if (std.mem.eql(u8, import_str, "std")) {
 if (self.std_uri) |uri| return try std.mem.dupe(allocator, u8, uri) else {
-std.debug.print("Cannot resolve std library import, path is null.\n", .{});
+std.log.debug(.doc_store, "Cannot resolve std library import, path is null.\n", .{});
 return null;
 }
 } else if (std.mem.eql(u8, import_str, "builtin")) {
@@ -553,7 +553,7 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
 import_str,
 )) orelse return null;
-// std.debug.print("Import final URI: {}\n", .{final_uri});
+// std.log.debug(.doc_store, "Import final URI: {}\n", .{final_uri});
 var consumed_final_uri = false;
 defer if (!consumed_final_uri) allocator.free(final_uri);
@@ -582,7 +582,7 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
 defer allocator.free(file_path);
 var file = std.fs.cwd().openFile(file_path, .{}) catch {
-std.debug.print("Cannot open import file {}\n", .{file_path});
+std.log.debug(.doc_store, "Cannot open import file {}\n", .{file_path});
 return null;
 };
@@ -594,7 +594,7 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
 errdefer allocator.free(file_contents);
 file.inStream().readNoEof(file_contents) catch {
-std.debug.print("Could not read from file {}\n", .{file_path});
+std.log.debug(.doc_store, "Could not read from file {}\n", .{file_path});
 return null;
 };
@@ -615,7 +615,7 @@ fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !
 const std_path = std.fs.path.resolve(allocator, &[_][]const u8{
 zpath, "./std/std.zig",
 }) catch |err| block: {
-std.debug.print("Failed to resolve zig std library path, error: {}\n", .{err});
+std.log.debug(.doc_store, "Failed to resolve zig std library path, error: {}\n", .{err});
 return null;
 };

View File

@@ -10,6 +10,56 @@ const types = @import("types.zig");
 const analysis = @import("analysis.zig");
 const URI = @import("uri.zig");
+pub const log_level: std.log.Level = switch (std.builtin.mode) {
+    .Debug => .debug,
+    else => .notice,
+};
+
+pub fn log(
+    comptime message_level: std.log.Level,
+    comptime scope: @Type(.EnumLiteral),
+    comptime format: []const u8,
+    args: var,
+) void {
+    var message = std.fmt.allocPrint(allocator, "[{}-{}] " ++ format, .{ @tagName(message_level), @tagName(scope) } ++ args) catch |err| {
+        std.debug.print("Failed to allocPrint message.", .{});
+        return;
+    };
+
+    if (@enumToInt(message_level) <= @enumToInt(std.log.Level.notice)) {
+        const message_type: types.MessageType = switch (message_level) {
+            .info => .Log,
+            .notice => .Info,
+            .warn => .Warning,
+            .err => .Error,
+            else => .Error,
+        };
+        send(types.Notification{
+            .method = "window/showMessage",
+            .params = types.NotificationParams{ .ShowMessageParams = .{
+                .type = message_type,
+                .message = message, },
+            },
+        }) catch |err| {
+            std.debug.print("Failed to send show message notification (error: {}).", .{err});
+        };
+    } else {
+        const message_type: types.MessageType = if (message_level == .debug)
+            .Log
+        else
+            .Info;
+        send(types.Notification{
+            .method = "window/logMessage",
+            .params = types.NotificationParams{ .LogMessageParams = .{
+                .type = message_type,
+                .message = message, },
+            },
+        }) catch |err| {
+            std.debug.print("Failed to send show message notification (error: {}).", .{err});
+        };
+    }
+}
+
 // Code is largely based off of https://github.com/andersfr/zig-lsp/blob/master/server.zig
 var stdout: std.io.BufferedOutStream(4096, std.fs.File.OutStream) = undefined;
 var allocator: *std.mem.Allocator = undefined;
@@ -850,13 +900,13 @@ fn loadConfig(folder_path: []const u8) ?Config {
 // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
 var config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{ .allocator = allocator }) catch |err| {
-std.debug.print("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
+std.log.debug(.main, "Error while parsing configuration file: {}\nUsing default config.\n", .{err});
 return null;
 };
 if (config.zig_lib_path) |zig_lib_path| {
 if (!std.fs.path.isAbsolute(zig_lib_path)) {
-std.debug.print("zig library path is not absolute, defaulting to null.\n", .{});
+std.log.debug(.main, "zig library path is not absolute, defaulting to null.\n", .{});
 allocator.free(zig_lib_path);
 config.zig_lib_path = null;
 }
@@ -908,7 +958,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const start_time = std.time.milliTimestamp();
 defer {
 const end_time = std.time.milliTimestamp();
-std.debug.print("Took {}ms to process method {}\n", .{ end_time - start_time, method });
+std.log.debug(.main, "Took {}ms to process method {}\n", .{ end_time - start_time, method });
 }
 // Core
@@ -955,11 +1005,11 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 if (params.getValue("workspaceFolders")) |workspace_folders| {
 switch (workspace_folders) {
 .Array => |folders| {
-std.debug.print("Got workspace folders in initialization.\n", .{});
+std.log.debug(.main, "Got workspace folders in initialization.\n", .{});
 for (folders.items) |workspace_folder| {
 const folder_uri = workspace_folder.Object.getValue("uri").?.String;
-std.debug.print("Loaded folder {}\n", .{folder_uri});
+std.log.debug(.main, "Loaded folder {}\n", .{folder_uri});
 const duped_uri = try std.mem.dupe(allocator, u8, folder_uri);
 try workspace_folder_configs.putNoClobber(duped_uri, null);
 }
@@ -969,8 +1019,9 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 }
 }
-std.debug.print("{}\n", .{client_capabilities});
+std.log.debug(.main, "{}\n", .{client_capabilities});
 try respondGeneric(id, initialize_response);
+std.log.notice(.main, "zls initialized", .{});
 } else if (std.mem.eql(u8, method, "shutdown")) {
 keep_running.* = false;
 // Technically we shoudl deinitialize first and send possible errors to the client
@@ -1025,7 +1076,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const content_changes = params.getValue("contentChanges").?.Array;
 const handle = document_store.getHandle(uri) orelse {
-std.debug.print("Trying to change non existent document {}", .{uri});
+std.log.debug(.main, "Trying to change non existent document {}", .{uri});
 return;
 };
@@ -1037,7 +1088,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const text_document = params.getValue("textDocument").?.Object;
 const uri = text_document.getValue("uri").?.String;
 const handle = document_store.getHandle(uri) orelse {
-std.debug.print("Trying to save non existent document {}", .{uri});
+std.log.debug(.main, "Trying to save non existent document {}", .{uri});
 return;
 };
@@ -1060,7 +1111,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const this_config = configFromUriOr(uri, config);
 if (this_config.enable_semantic_tokens) {
 const handle = document_store.getHandle(uri) orelse {
-std.debug.print("Trying to complete in non existent document {}", .{uri});
+std.log.debug(.main, "Trying to complete in non existent document {}", .{uri});
 return try respondGeneric(id, no_semantic_tokens_response);
 };
@@ -1083,7 +1134,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const position = params.getValue("position").?.Object;
 const handle = document_store.getHandle(uri) orelse {
-std.debug.print("Trying to complete in non existent document {}", .{uri});
+std.log.debug(.main, "Trying to complete in non existent document {}", .{uri});
 return try respondGeneric(id, no_completions_response);
 };
@@ -1149,7 +1200,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const position = params.getValue("position").?.Object;
 const handle = document_store.getHandle(uri) orelse {
-std.debug.print("Trying to got to definition in non existent document {}", .{uri});
+std.log.debug(.main, "Trying to got to definition in non existent document {}", .{uri});
 return try respondGeneric(id, null_result_response);
 };
@@ -1179,7 +1230,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const position = params.getValue("position").?.Object;
 const handle = document_store.getHandle(uri) orelse {
-std.debug.print("Trying to got to definition in non existent document {}", .{uri});
+std.log.debug(.main, "Trying to got to definition in non existent document {}", .{uri});
 return try respondGeneric(id, null_result_response);
 };
@@ -1206,7 +1257,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const uri = document.getValue("uri").?.String;
 const handle = document_store.getHandle(uri) orelse {
-std.debug.print("Trying to got to definition in non existent document {}", .{uri});
+std.log.debug(.main, "Trying to got to definition in non existent document {}", .{uri});
 return try respondGeneric(id, null_result_response);
 };
@@ -1218,7 +1269,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 const uri = document.getValue("uri").?.String;
 const handle = document_store.getHandle(uri) orelse {
-std.debug.print("Trying to got to definition in non existent document {}", .{uri});
+std.log.debug(.main, "Trying to got to definition in non existent document {}", .{uri});
 return try respondGeneric(id, null_result_response);
 };
@@ -1228,7 +1279,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 process.stdout_behavior = .Pipe;
 process.spawn() catch |err| {
-std.debug.print("Failed to spawn zig fmt process, error: {}\n", .{err});
+std.log.debug(.main, "Failed to spawn zig fmt process, error: {}\n", .{err});
 return try respondGeneric(id, null_result_response);
 };
 try process.stdin.?.writeAll(handle.document.text);
@@ -1271,10 +1322,10 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
 // TODO: Unimplemented methods, implement them and add them to server capabilities.
 try respondGeneric(id, null_result_response);
 } else if (root.Object.getValue("id")) |_| {
-std.debug.print("Method with return value not implemented: {}", .{method});
+std.log.debug(.main, "Method with return value not implemented: {}", .{method});
 try respondGeneric(id, not_implemented_response);
 } else {
-std.debug.print("Method without return value not implemented: {}", .{method});
+std.log.debug(.main, "Method without return value not implemented: {}", .{method});
 }
 }
@@ -1296,8 +1347,8 @@ pub fn main() anyerror!void {
 }
 defer if (debug_alloc) |dbg| {
-std.debug.print("Finished cleanup, last allocation info.\n", .{});
-std.debug.print("{}\n", .{dbg.info});
+std.log.debug(.main, "Finished cleanup, last allocation info.\n", .{});
+std.log.debug(.main, "{}\n", .{dbg.info});
 };
 // Init global vars
@@ -1339,12 +1390,12 @@ pub fn main() anyerror!void {
 break :find_zig;
 }
-std.debug.print("zig path `{}` is not absolute, will look in path\n", .{exe_path});
+std.log.debug(.main, "zig path `{}` is not absolute, will look in path\n", .{exe_path});
 }
 const env_path = std.process.getEnvVarOwned(allocator, "PATH") catch |err| switch (err) {
 error.EnvironmentVariableNotFound => {
-std.debug.print("Could not get PATH.\n", .{});
+std.log.debug(.main, "Could not get PATH.\n", .{});
 break :find_zig;
 },
 else => return err,
@@ -1365,21 +1416,21 @@ pub fn main() anyerror!void {
 var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
 zig_exe_path = std.os.realpath(full_path, &buf) catch continue;
-std.debug.print("Found zig in PATH: {}\n", .{zig_exe_path});
+std.log.debug(.main, "Found zig in PATH: {}\n", .{zig_exe_path});
 break :find_zig;
 }
 }
 if (zig_exe_path) |exe_path| {
 config.zig_exe_path = exe_path;
-std.debug.print("Using zig executable {}\n", .{exe_path});
+std.log.debug(.main, "Using zig executable {}\n", .{exe_path});
 if (config.zig_lib_path == null) {
 // Set the lib path relative to the executable path.
 config.zig_lib_path = try std.fs.path.resolve(allocator, &[_][]const u8{
 std.fs.path.dirname(exe_path).?, "./lib/zig",
 });
-std.debug.print("Resolved standard library from executable: {}\n", .{config.zig_lib_path});
+std.log.debug(.main, "Resolved standard library from executable: {}\n", .{config.zig_lib_path});
 }
 }
@@ -1405,7 +1456,7 @@ pub fn main() anyerror!void {
 var keep_running = true;
 while (keep_running) {
 const headers = readRequestHeader(allocator, reader) catch |err| {
-std.debug.print("{}; exiting!", .{@errorName(err)});
+std.log.debug(.main, "{}; exiting!", .{@errorName(err)});
 return;
 };
 defer headers.deinit(allocator);
@@ -1416,7 +1467,7 @@ pub fn main() anyerror!void {
 json_parser.reset();
 if (debug_alloc) |dbg| {
-std.debug.print("{}\n", .{dbg.info});
+std.log.debug(.main, "{}\n", .{dbg.info});
 }
 }
 }

View File

@@ -144,6 +144,10 @@ const GapHighlighter = struct {
 }
 fn next(self: *GapHighlighter, node: *ast.Node) !void {
+if (self.current_idx > 0 and self.builder.handle.tree.token_ids[self.current_idx - 1] == .ContainerDocComment) {
+    try self.handleTok(self.current_idx - 1);
+}
+
 var i = self.current_idx;
 while (i < node.firstToken()) : (i += 1) {
 try self.handleTok(i);

View File

@@ -111,7 +111,7 @@ pub const MessageType = enum(Integer) {
 /// Params for a LogMessage Notification (window/logMessage)
 pub const LogMessageParams = struct {
-@"type": MessageType,
+type: MessageType,
 message: String
 };
@@ -361,6 +361,6 @@ pub const DocumentSymbol = struct {
 };
 pub const ShowMessageParams = struct {
-@"type": MessageType,
+type: MessageType,
 message: String
 };
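
The two param structs above (ShowMessageParams and LogMessageParams) are exactly what the new `log` handler in main.zig fills in. As a rough sketch of the routing rule that handler implements, levels at or above `.notice` are surfaced in the editor UI via window/showMessage, while `.info` and `.debug` go to the quieter window/logMessage channel. The snippet below only restates the comparison used in the handler; the `methodFor` helper is hypothetical and not part of zls.

    const std = @import("std");

    // Illustrative helper mirroring the branch in the new `log` handler:
    // client-facing severity goes to showMessage, chatter goes to logMessage.
    fn methodFor(level: std.log.Level) []const u8 {
        return if (@enumToInt(level) <= @enumToInt(std.log.Level.notice))
            "window/showMessage"
        else
            "window/logMessage";
    }

    test "notice and above are surfaced to the client" {
        std.testing.expect(std.mem.eql(u8, methodFor(.notice), "window/showMessage"));
        std.testing.expect(std.mem.eql(u8, methodFor(.err), "window/showMessage"));
        std.testing.expect(std.mem.eql(u8, methodFor(.debug), "window/logMessage"));
    }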