Improvements

Alexandros Naskos 2020-06-16 18:49:31 +03:00
parent fd424277ab
commit a45939f8ab
2 changed files with 38 additions and 33 deletions

View File

@@ -23,12 +23,13 @@ const ClientCapabilities = struct {
     supports_semantic_tokens: bool = false,
     hover_supports_md: bool = false,
     completion_doc_supports_md: bool = false,
+    supports_workspace_folders: bool = false,
 };

 var client_capabilities = ClientCapabilities{};

 const initialize_response =
-    \\,"result": {"capabilities": {"signatureHelpProvider": {"triggerCharacters": ["(",","]},"textDocumentSync": 1,"completionProvider": {"resolveProvider": false,"triggerCharacters": [".",":","@"]},"documentHighlightProvider": false,"hoverProvider": true,"codeActionProvider": false,"declarationProvider": true,"definitionProvider": true,"typeDefinitionProvider": true,"implementationProvider": false,"referencesProvider": false,"documentSymbolProvider": true,"colorProvider": false,"documentFormattingProvider": false,"documentRangeFormattingProvider": false,"foldingRangeProvider": false,"selectionRangeProvider": false,"workspaceSymbolProvider": false,"rangeProvider": false,"documentProvider": true,"workspace": {"workspaceFolders": {"supported": true,"changeNotifications": true}},"semanticTokensProvider": {"documentProvider": true,"legend": {"tokenTypes": ["type","struct","enum","union","parameter","variable","tagField","field","function","keyword","modifier","comment","string","number","operator","builtin"],"tokenModifiers": ["definition","async","documentation"]}}}}}
+    \\,"result": {"capabilities": {"signatureHelpProvider": {"triggerCharacters": ["(",","]},"textDocumentSync": 1,"completionProvider": {"resolveProvider": false,"triggerCharacters": [".",":","@"]},"documentHighlightProvider": false,"hoverProvider": true,"codeActionProvider": false,"declarationProvider": true,"definitionProvider": true,"typeDefinitionProvider": true,"implementationProvider": false,"referencesProvider": false,"documentSymbolProvider": true,"colorProvider": false,"documentFormattingProvider": false,"documentRangeFormattingProvider": false,"foldingRangeProvider": false,"selectionRangeProvider": false,"workspaceSymbolProvider": false,"rangeProvider": false,"documentProvider": true,"workspace": {"workspaceFolders": {"supported": true,"changeNotifications": true}},"semanticTokensProvider": {"documentProvider": true,"legend": {"tokenTypes": ["type","struct","enum","union","parameter","variable","tagField","field","function","keyword","modifier","comment","string","number","operator","builtin"],"tokenModifiers": ["definition","async","documentation", "generic"]}}}}}
 ;
 const not_implemented_response =
@@ -60,7 +61,7 @@ fn send(reqOrRes: var) !void {
     defer arena.deinit();

     var arr = std.ArrayList(u8).init(&arena.allocator);
-    try std.json.stringify(reqOrRes, std.json.StringifyOptions{}, arr.outStream());
+    try std.json.stringify(reqOrRes, .{}, arr.outStream());

     const stdout_stream = stdout.outStream();
     try stdout_stream.print("Content-Length: {}\r\n\r\n", .{arr.items.len});
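Side note on the stringify change above (not part of the commit): `.{}` is an anonymous struct literal that coerces to the expected parameter type, here `std.json.StringifyOptions`, so it is simply a shorter way to pass all-default options. A minimal standalone sketch of that coercion, using invented names (`Options`, `dump`) and the mid-2020 Zig idioms this file already uses (`std.debug.warn`):

    const std = @import("std");

    // Invented options struct, only to show that `.{}` fills in every default
    // field, exactly like spelling out the type name.
    const Options = struct {
        indent: u8 = 0,
        pretty: bool = false,
    };

    fn dump(opts: Options) void {
        std.debug.warn("indent={} pretty={}\n", .{ opts.indent, opts.pretty });
    }

    pub fn main() void {
        dump(Options{}); // explicit form
        dump(.{}); // anonymous literal, type inferred from the parameter
    }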
@@ -851,22 +852,6 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
         else => types.RequestId{ .Integer = 0 },
     } else types.RequestId{ .Integer = 0 };

-    if (id == .Integer and id.Integer == 1337 and (root.Object.getValue("method") == null or std.mem.eql(u8, root.Object.getValue("method").?.String, ""))) {
-        if (root.Object.getValue("result")) |result_obj| {
-            if (result_obj == .Array) {
-                const result = result_obj.Array;
-
-                for (result.items) |workspace_folder| {
-                    const duped_uri = try std.mem.dupe(allocator, u8, workspace_folder.Object.getValue("uri").?.String);
-                    try workspace_folder_configs.putNoClobber(duped_uri, null);
-                }
-            }
-        }
-
-        try loadWorkspaceConfigs();
-        return;
-    }
-
     std.debug.assert(root.Object.getValue("method") != null);
     const method = root.Object.getValue("method").?.String;
@@ -880,6 +865,12 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
     if (std.mem.eql(u8, method, "initialize")) {
         const params = root.Object.getValue("params").?.Object;
         const client_capabs = params.getValue("capabilities").?.Object;
+        if (client_capabs.getValue("workspace")) |workspace_capabs| {
+            if (workspace_capabs.Object.getValue("workspaceFolders")) |folders_capab| {
+                client_capabilities.supports_workspace_folders = folders_capab.Bool;
+            }
+        }
+
         if (client_capabs.getValue("textDocument")) |text_doc_capabs| {
             if (text_doc_capabs.Object.getValue("semanticTokens")) |_| {
                 client_capabilities.supports_semantic_tokens = true;
@@ -911,15 +902,27 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
             }
         }

+        if (params.getValue("workspaceFolders")) |workspace_folders| {
+            switch (workspace_folders) {
+                .Array => |folders| {
+                    std.debug.warn("Got workspace folders in initialization.\n", .{});
+                    for (folders.items) |workspace_folder| {
+                        const folder_uri = workspace_folder.Object.getValue("uri").?.String;
+                        std.debug.warn("Loaded folder {}\n", .{folder_uri});
+                        const duped_uri = try std.mem.dupe(allocator, u8, folder_uri);
+                        try workspace_folder_configs.putNoClobber(duped_uri, null);
+                    }
+                    try loadWorkspaceConfigs();
+                },
+                else => {},
+            }
+        }
+
         std.debug.warn("{}\n", .{client_capabilities});
         try respondGeneric(id, initialize_response);
     } else if (std.mem.eql(u8, method, "initialized")) {
-        // Send the workspaceFolders request
-        try send(types.Request{
-            .id = .{ .Integer = 1337 },
-            .method = "workspace/workspaceFolders",
-            .params = {},
-        });
+        // All gucci
     } else if (std.mem.eql(u8, method, "$/cancelRequest")) {
         // noop
     }
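Context for the hunks above (this note is not part of the commit): the server no longer sends a follow-up workspace/workspaceFolders request tagged with the magic id 1337 after `initialized`; instead it records the client's workspaceFolders capability and reads the folder list directly from the `initialize` params. A hedged sketch of params that would exercise both new branches, kept as a Zig multiline string in the style main.zig uses for JSON; the URI and folder name are invented:

    const std = @import("std");

    // Invented example payload; the field shapes follow the LSP initialize request.
    const example_initialize_params =
        \\{"capabilities": {"workspace": {"workspaceFolders": true},
        \\                  "textDocument": {"semanticTokens": {}}},
        \\ "workspaceFolders": [{"uri": "file:///home/user/project", "name": "project"}]}
    ;

    pub fn main() void {
        std.debug.warn("{}\n", .{example_initialize_params});
    }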

View File

@@ -25,17 +25,14 @@ const TokenModifiers = packed struct {
     definition: bool = false,
     @"async": bool = false,
     documentation: bool = false,
+    generic: bool = false,

     fn toInt(self: TokenModifiers) u32 {
-        return @as(u32, @bitCast(u3, self));
+        return @as(u32, @bitCast(u4, self));
     }

-    fn with(lhs: TokenModifiers, rhs: TokenModifiers) TokenModifiers {
-        return fromInt(toInt(lhs) | toInt(rhs));
-    }
-
-    fn intersect(lhs: TokenModifiers, rhs: TokenModifiers) TokenModifiers {
-        return fromInt(toInt(lhs) & toInt(rhs));
-    }
+    inline fn set(self: *TokenModifiers, comptime field: []const u8) void {
+        @field(self, field) = true;
+    }
 };
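The `u3` to `u4` change above tracks the new field count: TokenModifiers is a packed struct of bools, so its integer encoding must be exactly as wide as the number of flags, and `set` flips a flag by comptime field name via `@field`. A standalone sketch of the same pattern (not the zls source; written against the mid-2020 Zig this commit targets, where `@bitCast` takes the destination type as its first argument):

    const std = @import("std");

    const Flags = packed struct {
        definition: bool = false,
        @"async": bool = false,
        documentation: bool = false,
        generic: bool = false,

        fn toInt(self: Flags) u32 {
            // Width must match the field count: 4 bools -> u4 (hence the u3 -> u4 edit).
            return @as(u32, @bitCast(u4, self));
        }

        inline fn set(self: *Flags, comptime field: []const u8) void {
            @field(self, field) = true;
        }
    };

    pub fn main() void {
        var mods = Flags{};
        mods.set("documentation");
        std.debug.warn("modifier bitset = {}\n", .{mods.toInt()});
    }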
@@ -111,6 +108,7 @@ pub fn writeAllSemanticTokens(allocator: *std.mem.Allocator, handle: DocumentSto
     // TODO We only scan tokens for now, we need to actually do semantic analysis
     for (handle.tree.token_ids) |token_id, token_idx| {
+        var token_mod = TokenModifiers{};
         const token_type: TokenType = switch (token_id) {
             .StringLiteral, .MultilineStringLiteralLine, .CharLiteral => .string,
             .Builtin => .builtin,
@@ -158,7 +156,11 @@ pub fn writeAllSemanticTokens(allocator: *std.mem.Allocator, handle: DocumentSto
             .AngleBracketAngleBracketRightEqual,
             .Tilde,
             => .operator,
-            .LineComment, .DocComment, .ContainerDocComment => .comment,
+            .LineComment, .ContainerDocComment => .comment,
+            .DocComment => block: {
+                token_mod.set("documentation");
+                break :block .comment;
+            },
             .Keyword_align,
             .Keyword_allowzero,
             .Keyword_and,
@@ -214,7 +216,7 @@ pub fn writeAllSemanticTokens(allocator: *std.mem.Allocator, handle: DocumentSto
             else => continue,
         };

-        try builder.add(token_idx, token_type, TokenModifiers{});
+        try builder.add(token_idx, token_type, token_mod);
     }

     return builder.toOwnedSlice();
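Per-token flow after this hunk: every token starts with an empty `token_mod`, a switch arm may set modifiers as a side effect, and whatever ended up in `token_mod` is passed to `builder.add`. The DocComment arm uses a labeled block so it can both set the documentation modifier and still yield `.comment` as the token type. An isolated sketch of that labeled-block arm pattern, with invented names (`Kind`, `doc_flag`), not zls code:

    const std = @import("std");

    const Kind = enum { comment, string };

    pub fn main() void {
        var doc_flag = false;
        const tok: enum { LineComment, DocComment, StringLiteral } = .DocComment;

        const kind: Kind = switch (tok) {
            .LineComment => .comment,
            .DocComment => blk: {
                doc_flag = true; // side effect, like token_mod.set("documentation")
                break :blk .comment; // the arm still produces a token type
            },
            .StringLiteral => .string,
        };

        std.debug.warn("kind={} documentation={}\n", .{ kind, doc_flag });
    }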