Completion of global values, and some reformatting

parent a0ff26cc8f
commit ce443ba1dd
LICENSE.md (new file, 21 lines)
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Auguste Rame
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
src/data.zig (new file, 105 lines)
@@ -0,0 +1,105 @@
+// You can use `[...$("#toc-Builtin-Functions").parentElement.lastElementChild.children].map(_ => `"${_.innerText}"`).join(",\n")` to get all of the builtin functions on the documentation page.
+/// Builtin functions
+pub const builtins = [_][]const u8{
+    "@addWithOverflow",
+    "@alignCast",
+    "@alignOf",
+    "@as",
+    "@asyncCall",
+    "@atomicLoad",
+    "@atomicRmw",
+    "@atomicStore",
+    "@bitCast",
+    "@bitOffsetOf",
+    "@boolToInt",
+    "@bitSizeOf",
+    "@breakpoint",
+    "@mulAdd",
+    "@byteSwap",
+    "@bitReverse",
+    "@byteOffsetOf",
+    "@call",
+    "@cDefine",
+    "@cImport",
+    "@cInclude",
+    "@clz",
+    "@cmpxchgStrong",
+    "@cmpxchgWeak",
+    "@compileError",
+    "@compileLog",
+    "@ctz",
+    "@cUndef",
+    "@divExact",
+    "@divFloor",
+    "@divTrunc",
+    "@embedFile",
+    "@enumToInt",
+    "@errorName",
+    "@errorReturnTrace",
+    "@errorToInt",
+    "@errSetCast",
+    "@export",
+    "@fence",
+    "@field",
+    "@fieldParentPtr",
+    "@floatCast",
+    "@floatToInt",
+    "@frame",
+    "@Frame",
+    "@frameAddress",
+    "@frameSize",
+    "@hasDecl",
+    "@hasField",
+    "@import",
+    "@intCast",
+    "@intToEnum",
+    "@intToError",
+    "@intToFloat",
+    "@intToPtr",
+    "@memcpy",
+    "@memset",
+    "@mod",
+    "@mulWithOverflow",
+    "@OpaqueType",
+    "@panic",
+    "@popCount",
+    "@ptrCast",
+    "@ptrToInt",
+    "@rem",
+    "@returnAddress",
+    "@setAlignStack",
+    "@setCold",
+    "@setEvalBranchQuota",
+    "@setFloatMode",
+    "@setRuntimeSafety",
+    "@shlExact",
+    "@shlWithOverflow",
+    "@shrExact",
+    "@shuffle",
+    "@sizeOf",
+    "@splat",
+    "@sqrt",
+    "@sin",
+    "@cos",
+    "@exp",
+    "@exp2",
+    "@log",
+    "@log2",
+    "@log10",
+    "@fabs",
+    "@floor",
+    "@ceil",
+    "@trunc",
+    "@round",
+    "@subWithOverflow",
+    "@tagName",
+    "@TagType",
+    "@This",
+    "@truncate",
+    "@Type",
+    "@typeInfo",
+    "@typeName",
+    "@TypeOf",
+    "@unionInit",
+    "@Vector"
+};
src/main.zig (282 changed lines)
@@ -1,12 +1,18 @@
 const std = @import("std");
 const Uri = @import("uri.zig");
+const data = @import("data.zig");
+const types = @import("types.zig");
+const analysis = @import("analysis.zig");
 
 // Code is largely based off of https://github.com/andersfr/zig-lsp/blob/master/server.zig
 
 var stdout: std.fs.File.OutStream = undefined;
 var allocator: *std.mem.Allocator = undefined;
 
-const initialize_response = \\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":"]},"documentHighlightProvider":false,"codeActionProvider":false,"workspace":{"workspaceFolders":{"supported":true}}}}}
+/// Documents hashmap, types.DocumentUri:types.TextDocument
+var documents: std.StringHashMap(types.TextDocument) = undefined;
+
+const initialize_response = \\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"workspace":{"workspaceFolders":{"supported":true}}}}}
 ;
 
 const not_implemented_response = \\,"error":{"code":-32601,"message":"NotImplemented"}}
@@ -23,23 +29,29 @@ const edit_not_applied_response = \\,"result":{"applied":false,"failureReason":"
 const no_completions_response = \\,"result":{"isIncomplete":false,"items":[]}}
 ;
 
+/// Sends a request or response
+pub fn send(reqOrRes: var) !void {
+    // The most memory we'll probably need
+    var mem_buffer: [1024 * 128]u8 = undefined;
+    var fbs = std.io.fixedBufferStream(&mem_buffer);
+    try std.json.stringify(reqOrRes, std.json.StringifyOptions{}, fbs.outStream());
+    _ = try stdout.print("Content-Length: {}\r\n\r\n", .{fbs.pos});
+    _ = try stdout.write(fbs.getWritten());
+}
+
 pub fn log(comptime fmt: []const u8, args: var) !void {
-    // Don't need much memory for log messages. This is a bad approach, but it's quick and easy and I wrote this code in ~1 minute.
-    var buffer: []u8 = try allocator.alloc(u8, 100);
-    defer allocator.free(buffer);
-    var bstream = std.io.fixedBufferStream(buffer);
-    var stream = bstream.outStream();
+    var message = try std.fmt.allocPrint(allocator, fmt, args);
+    defer allocator.free(message);
 
-    _ = try stream.write(
-        \\{"jsonrpc":"2.0","method":"window/logMessage","params":{"type": 4, "message": "
-    );
-    _ = try stream.print(fmt, args);
-    _ = try stream.write(
-        \\"}}
-    );
-
-    _ = try stdout.print("Content-Length: {}\r\n\r\n", .{bstream.pos});
-    _ = try stdout.write(bstream.getWritten());
+    try send(types.Notification{
+        .method = "window/logMessage",
+        .params = types.NotificationParams{
+            .LogMessageParams = types.LogMessageParams{
+                .@"type" = types.MessageType.Log,
+                .message = message
+            }
+        }
+    });
 }
 
 pub fn respondGeneric(id: i64, response: []const u8) !void {
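
Note on the framing used by send() above: every outgoing message is an LSP base-protocol frame, that is a Content-Length header, a "\r\n\r\n" separator, then the JSON body. The hand-rolled string in the removed log() shows the payload shape; the new send() builds the same kind of frame by running one of the types structs through std.json.stringify. A sketch of one window/logMessage frame (the length and message text are illustrative, not computed):

    Content-Length: 96

    {"jsonrpc":"2.0","method":"window/logMessage","params":{"type": 4, "message": "starting zls"}}
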
@@ -57,62 +69,105 @@ pub fn respondGeneric(id: i64, response: []const u8) !void {
     _ = try stdout.write(response);
 }
 
-pub fn processSource(uri: []const u8, source: []const u8) !void {
-    try log("An error, cool", .{});
-    const tree = try std.zig.parse(allocator, source);
+pub fn openDocument(uri: []const u8, text: []const u8) !void {
+    const du = try std.mem.dupe(allocator, u8, uri);
+    _ = try documents.put(du, types.TextDocument{
+        .uri = du,
+        .text = try std.mem.dupe(allocator, u8, text)
+    });
+}
+
+pub fn publishDiagnostics(document: types.TextDocument) !void {
+    const tree = try std.zig.parse(allocator, document.text);
     defer tree.deinit();
 
-    var buffer: []u8 = try allocator.alloc(u8, 4096);
-    defer allocator.free(buffer);
-    // var buffer = try std.ArrayListSentineled(u8, 0).initSize(allocator, 0);
-    // defer buffer.deinit();
-    var bstream = std.io.fixedBufferStream(buffer);
-    var stream = bstream.outStream();
-
-    _ = try stream.write(
-        \\{"jsonrpc":"2.0","method":"textDocument/publishDiagnostics","params":{"uri":
-    );
-    _ = try stream.print("\"{}\",\"diagnostics\":[", .{uri});
+    var diagnostics = std.ArrayList(types.Diagnostic).init(allocator);
 
     if (tree.errors.len > 0) {
         var index: usize = 0;
         while (index < tree.errors.len) : (index += 1) {
 
             const err = tree.errors.at(index);
             const loc = tree.tokenLocation(0, err.loc());
 
-            _ = try stream.write(
-                \\{"range":{"start":{
-            );
-            _ = try stream.print("\"line\":{},\"character\":{}", .{loc.line, loc.column});
-            _ = try stream.write(
-                \\},"end":{
-            );
-            _ = try stream.print("\"line\":{},\"character\":{}", .{loc.line, loc.column});
-            _ = try stream.write(
-                \\}},"severity":1,"source":"zig-lsp","message":"
-            );
-            _ = try tree.renderError(err, stream);
-            _ = try stream.print("\",\"code\":\"{}\"", .{@tagName(err.*)});
-            _ = try stream.write(
-                \\,"relatedInformation":[]}
-            );
-            if (index != tree.errors.len - 1) {
-                _ = try stream.writeByte(',');
-            }
+            var mem_buffer: [256]u8 = undefined;
+            var fbs = std.io.fixedBufferStream(&mem_buffer);
+            _ = try tree.renderError(err, fbs.outStream());
+
+            try diagnostics.append(types.Diagnostic{
+                .range = types.Range{
+                    .start = types.Position{
+                        .line = @intCast(i64, loc.line),
+                        .character = @intCast(i64, loc.column)
+                    },
+                    .end = types.Position{
+                        .line = @intCast(i64, loc.line),
+                        .character = @intCast(i64, loc.column)
+                    }
+                },
+                .severity = types.DiagnosticSeverity.Error,
+                .code = @tagName(err.*),
+                .source = "zls",
+                .message = fbs.getWritten(),
+                // .relatedInformation = undefined
+            });
         }
     }
 
-    _ = try stream.write(
-        \\]}}
-    );
-
-    _ = try stdout.print("Content-Length: {}\r\n\r\n", .{bstream.pos});
-    _ = try stdout.write(bstream.getWritten());
+    try send(types.Notification{
+        .method = "textDocument/publishDiagnostics",
+        .params = types.NotificationParams{
+            .PublishDiagnosticsParams = types.PublishDiagnosticsParams{
+                .uri = document.uri,
+                .diagnostics = diagnostics.toOwnedSlice()
+            }
+        }
+    });
+}
+
+pub fn completeGlobal(id: i64, document: types.TextDocument) !void {
+    const tree = try std.zig.parse(allocator, document.text);
+    defer tree.deinit();
+
+    if (tree.errors.len > 0) return try respondGeneric(id, no_completions_response);
+
+    var completions = std.ArrayList(types.CompletionItem).init(allocator);
+
+    // try log("{}", .{&tree.root_node.decls});
+    var decls = tree.root_node.decls.iterator(0);
+    while (decls.next()) |decl_ptr| {
+
+        var decl = decl_ptr.*;
+        switch (decl.id) {
+            .FnProto => {
+                const func = decl.cast(std.zig.ast.Node.FnProto).?;
+                // if (std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return func;
+                try completions.append(types.CompletionItem{
+                    .label = tree.tokenSlice(func.name_token.?),
+                    .kind = types.CompletionItemKind.Function,
+                });
+            },
+            .VarDecl => {
+                const vari = decl.cast(std.zig.ast.Node.VarDecl).?;
+                // if (std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return func;
+                try completions.append(types.CompletionItem{
+                    .label = tree.tokenSlice(vari.name_token),
+                    .kind = types.CompletionItemKind.Variable,
+                });
+            },
+            else => {}
+        }
+
+    }
+
+    try send(types.Response{
+        .id = .{.Integer = id},
+        .result = types.ResponseParams{
+            .CompletionList = types.CompletionList{
+                .isIncomplete = false,
+                .items = completions.toOwnedSlice()
+            }
+        }
+    });
 }
 
 // pub fn signature
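
The hunk above replaces the hand-built publishDiagnostics JSON with types.Diagnostic values handed to send(). The notification the client receives still carries the same fields the old string concatenation produced, roughly (URI, positions, code, and message are illustrative):

    {"jsonrpc":"2.0","method":"textDocument/publishDiagnostics","params":{"uri":"file:///example.zig","diagnostics":[{"range":{"start":{"line":3,"character":7},"end":{"line":3,"character":7}},"severity":1,"code":"ExpectedToken","source":"zls","message":"expected token ';'"}]}}

Two behavioural details of the change: the diagnostic source is now "zls" instead of "zig-lsp", and the message text comes from tree.renderError written into a fixed 256-byte buffer per error.
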
@@ -125,6 +180,8 @@ pub fn processJsonRpc(json: []const u8) !void {
     const root = tree.root;
 
+    // if (root.Object.getValue("method") == null) {return;}
+
     const method = root.Object.getValue("method").?.String;
     const id = if (root.Object.getValue("id")) |id| id.Integer else 0;
 
@@ -144,30 +201,107 @@ pub fn processJsonRpc(json: []const u8) !void {
         const uri = document.getValue("uri").?.String;
         const text = document.getValue("text").?.String;
 
-        try processSource(uri, text);
+        try openDocument(uri, text);
+        try publishDiagnostics(documents.getValue(uri).?);
     } else if (std.mem.eql(u8, method, "textDocument/didChange")) {
-        const document = params.getValue("textDocument").?.Object;
-        const uri = document.getValue("uri").?.String;
-        const text = params.getValue("contentChanges").?.Array.items[0].Object.getValue("text").?.String;
+        const text_document = params.getValue("textDocument").?.Object;
+        const uri = text_document.getValue("uri").?.String;
 
-        try processSource(uri, text);
+        var document = &(documents.get(uri).?.value);
+        const content_changes = params.getValue("contentChanges").?.Array;
+        // const text = content_changes.items[0].Object.getValue("text").?.String
+
+        for (content_changes.items) |change| {
+            if (change.Object.getValue("range")) |range| {
+                const start_pos = types.Position{
+                    .line = range.Object.getValue("start").?.Object.getValue("line").?.Integer,
+                    .character = range.Object.getValue("start").?.Object.getValue("character").?.Integer
+                };
+                const end_pos = types.Position{
+                    .line = range.Object.getValue("end").?.Object.getValue("line").?.Integer,
+                    .character = range.Object.getValue("end").?.Object.getValue("character").?.Integer
+                };
+
+                const before = document.text[0..try document.positionToIndex(start_pos)];
+                const after = document.text[try document.positionToIndex(end_pos)..document.text.len];
+                allocator.free(document.text);
+                document.text = try std.mem.concat(allocator, u8, &[3][]const u8{ before, change.Object.getValue("text").?.String, after });
+            } else {
+                allocator.free(document.text);
+                document.text = try std.mem.dupe(allocator, u8, change.Object.getValue("text").?.String);
+            }
+        }
+
+        try publishDiagnostics(document.*);
     } else if (std.mem.eql(u8, method, "textDocument/didSave")) {
         // noop
     } else if (std.mem.eql(u8, method, "textDocument/didClose")) {
-        // noop
+        const document = params.getValue("textDocument").?.Object;
+        const uri = document.getValue("uri").?.String;
+
+        _ = documents.remove(uri);
     }
     // Autocomplete / Signatures
     else if (std.mem.eql(u8, method, "textDocument/completion")) {
-        try respondGeneric(id, no_completions_response);
+        const text_document = params.getValue("textDocument").?.Object;
+        const uri = text_document.getValue("uri").?.String;
+        const position = params.getValue("position").?.Object;
+
+        const document = documents.getValue(uri).?;
+        const pos = types.Position{
+            .line = position.getValue("line").?.Integer,
+            .character = position.getValue("character").?.Integer - 1,
+        };
+        if (pos.character >= 0) {
+            const pos_index = try document.positionToIndex(pos);
+            const char = document.text[pos_index];
+
+            if (char == '@') {
+                var builtin_completions: [data.builtins.len]types.CompletionItem = undefined;
+
+                for (data.builtins) |builtin, i| {
+                    builtin_completions[i] = types.CompletionItem{
+                        .label = builtin,
+                        .kind = types.CompletionItemKind.Function,
+                        .textEdit = types.TextEdit{
+                            .range = types.Range{
+                                .start = pos,
+                                .end = pos,
+                            },
+                            .newText = builtin,
+                        },
+                    };
+                }
+
+                try send(types.Response{
+                    .id = .{.Integer = id},
+                    .result = types.ResponseParams{
+                        .CompletionList = types.CompletionList{
+                            .isIncomplete = false,
+                            .items = builtin_completions[0..]
+                        }
+                    }
+                });
+            } else if (char != '.') {
+                try completeGlobal(id, document);
+            } else {
+                try respondGeneric(id, no_completions_response);
+            }
+        } else {
+            try respondGeneric(id, no_completions_response);
+        }
     } else if (std.mem.eql(u8, method, "textDocument/signatureHelp")) {
+        // try respondGeneric(id,
+        // \\,"result":{"signatures":[{
+        // \\"label": "nameOfFunction(aNumber: u8)",
+        // \\"documentation": {"kind": "markdown", "value": "Description of the function in **Markdown**!"},
+        // \\"parameters": [
+        // \\{"label": [15, 27], "documentation": {"kind": "markdown", "value": "An argument"}}
+        // \\]
+        // \\}]}}
+        // );
         try respondGeneric(id,
-            \\,"result":{"signatures":[{
-            \\"label": "nameOfFunction(aNumber: u8)",
-            \\"documentation": {"kind": "markdown", "value": "Description of the function in **Markdown**!"},
-            \\"parameters": [
-            \\{"label": [15, 27], "documentation": {"kind": "markdown", "value": "An argument"}}
-            \\]
-            \\}]}}
+            \\,"result":{"signatures":[]}}
         );
     } else if (root.Object.getValue("id")) |_| {
         try log("Method with return value not implemented: {}", .{method});
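
For reference, the completion branch above reads params.textDocument.uri and params.position, then inspects the byte just before the cursor (position.character - 1): '@' answers with the builtin list from data.zig, '.' falls back to the empty no_completions_response, and anything else goes through completeGlobal. An illustrative exchange for an '@' trigger (positions and the item list are examples, not literal output):

    request params:  {"textDocument":{"uri":"file:///example.zig"},"position":{"line":4,"character":1}}
    response result: {"isIncomplete":false,"items":[{"label":"@import","kind":3,"textEdit":{"range":{"start":{"line":4,"character":0},"end":{"line":4,"character":0}},"newText":"@import"}}, ...]}

Here kind 3 is types.CompletionItemKind.Function, serialized through its jsonStringify override.
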
@@ -180,18 +314,26 @@ pub fn processJsonRpc(json: []const u8) !void {
 pub fn main() anyerror!void {
 
+    // Init memory
+
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer arena.deinit();
     allocator = &arena.allocator;
 
+    // Init buffer for stdin read
+
     var buffer = std.ArrayList(u8).init(allocator);
     defer buffer.deinit();
 
     try buffer.resize(4096);
 
+    // Init global vars
+
     const stdin = std.io.getStdIn().inStream();
     stdout = std.io.getStdOut().outStream();
 
+    documents = std.StringHashMap(types.TextDocument).init(allocator);
+
     var offset: usize = 0;
     var bytes_read: usize = 0;
 
src/types.zig (new file, 229 lines)
@@ -0,0 +1,229 @@
+// Collection of JSONRPC and LSP structs, enums, and unions
+
+const std = @import("std");
+const json = std.json;
+
+// JSON Types
+
+pub const String = []const u8;
+pub const Integer = i64;
+pub const Float = f64;
+pub const Bool = bool;
+pub const Array = json.Array;
+pub const Object = json.ObjectMap;
+// pub const Any = @TypeOf(var);
+
+// Basic structures
+
+pub const DocumentUri = String;
+
+pub const Position = struct {
+    line: Integer,
+    character: Integer
+};
+
+pub const Range = struct {
+    start: Position,
+    end: Position
+};
+
+pub const Location = struct {
+    uri: DocumentUri,
+    range: Range
+};
+
+/// Id of a request
+pub const RequestId = union(enum) {
+    String: String,
+    Integer: Integer,
+    Float: Float,
+};
+
+/// Params of a request
+pub const RequestParams = union(enum) {
+
+};
+
+pub const NotificationParams = union(enum) {
+    LogMessageParams: LogMessageParams,
+    PublishDiagnosticsParams: PublishDiagnosticsParams
+};
+
+/// Params of a response (result)
+pub const ResponseParams = union(enum) {
+    CompletionList: CompletionList
+};
+
+/// JSONRPC error
+pub const Error = struct {
+    code: Integer,
+    message: String,
+    data: String,
+};
+
+/// JSONRPC request
+pub const Request = struct {
+    jsonrpc: String = "2.0",
+    method: String,
+    id: ?RequestId = RequestId{.Integer = 0},
+    params: RequestParams
+};
+
+/// JSONRPC notifications
+pub const Notification = struct {
+    jsonrpc: String = "2.0",
+    method: String,
+    params: NotificationParams
+};
+
+/// JSONRPC response
+pub const Response = struct {
+    jsonrpc: String = "2.0",
+    @"error": ?Error = null,
+    id: RequestId,
+    result: ResponseParams,
+};
+
+/// Type of a debug message
+pub const MessageType = enum(Integer) {
+    Error = 1,
+    Warning = 2,
+    Info = 3,
+    Log = 4,
+
+    pub fn jsonStringify(
+        value: MessageType,
+        options: json.StringifyOptions,
+        out_stream: var,
+    ) !void {
+        try json.stringify(@enumToInt(value), options, out_stream);
+    }
+};
+
+/// Params for a LogMessage Notification (window/logMessage)
+pub const LogMessageParams = struct {
+    @"type": MessageType,
+    message: String
+};
+
+pub const DiagnosticSeverity = enum(Integer) {
+    Error = 1,
+    Warning = 2,
+    Information = 3,
+    Hint = 4,
+
+    pub fn jsonStringify(
+        value: DiagnosticSeverity,
+        options: json.StringifyOptions,
+        out_stream: var,
+    ) !void {
+        try json.stringify(@enumToInt(value), options, out_stream);
+    }
+};
+
+pub const Diagnostic = struct {
+    range: Range,
+    severity: DiagnosticSeverity,
+    code: String,
+    source: String,
+    message: String,
+};
+
+pub const PublishDiagnosticsParams = struct {
+    uri: DocumentUri,
+    diagnostics: []Diagnostic
+};
+
+pub const TextDocument = struct {
+    uri: DocumentUri,
+    text: String,
+
+    pub fn positionToIndex(self: *const TextDocument, position: Position) !usize {
+        var split_iterator = std.mem.split(self.text, "\n");
+
+        var line: i64 = 0;
+        while (line < position.line) : (line += 1) {
+            _ = split_iterator.next() orelse return error.InvalidParams;
+        }
+
+        var index = @intCast(i64, split_iterator.index.?) + position.character;
+
+        if (index < 0 or index >= @intCast(i64, self.text.len)) {
+            return error.InvalidParams;
+        }
+
+        return @intCast(usize, index);
+    }
+};
+
+pub const TextEdit = struct {
+    range: Range,
+    newText: String,
+};
+
+// pub const TextDocumentIdentifier = struct {
+// uri: DocumentUri,
+// };
+
+// pub const CompletionTriggerKind = enum(Integer) {
+// Invoked = 1,
+// TriggerCharacter = 2,
+// TriggerForIncompleteCompletions = 3,
+
+// pub fn jsonStringify(
+// value: CompletionTriggerKind,
+// options: json.StringifyOptions,
+// out_stream: var,
+// ) !void {
+// try json.stringify(@enumToInt(value), options, out_stream);
+// }
+// };
+
+pub const CompletionList = struct {
+    isIncomplete: Bool,
+    items: []CompletionItem,
+};
+
+pub const CompletionItemKind = enum(Integer) {
+    Text = 1,
+    Method = 2,
+    Function = 3,
+    Constructor = 4,
+    Field = 5,
+    Variable = 6,
+    Class = 7,
+    Interface = 8,
+    Module = 9,
+    Property = 10,
+    Unit = 11,
+    Value = 12,
+    Enum = 13,
+    Keyword = 14,
+    Snippet = 15,
+    Color = 16,
+    File = 17,
+    Reference = 18,
+    Folder = 19,
+    EnumMember = 20,
+    Constant = 21,
+    Struct = 22,
+    Event = 23,
+    Operator = 24,
+    TypeParameter = 25,
+
+    pub fn jsonStringify(
+        value: CompletionItemKind,
+        options: json.StringifyOptions,
+        out_stream: var,
+    ) !void {
+        try json.stringify(@enumToInt(value), options, out_stream);
+    }
+};
+
+pub const CompletionItem = struct {
+    label: String,
+    kind: CompletionItemKind,
+    textEdit: ?TextEdit = null,
+    // filterText: String = .NotDefined,
+};
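
A quick sketch (not part of the commit) of how TextDocument.positionToIndex resolves an LSP position to a byte offset, assuming std.mem.split's index field sits just past the last "\n" it consumed:

    const std = @import("std");
    const types = @import("types.zig");

    test "positionToIndex maps a line/character pair to a byte offset" {
        const doc = types.TextDocument{
            .uri = "file:///example.zig", // hypothetical URI; only .text matters here
            .text = "const a = 1;\nconst b = 2;\n",
        };
        // Line 0 is "const a = 1;" plus "\n" (13 bytes), so after skipping it the
        // iterator index is 13; adding character 6 gives 19, which points at 'b'.
        const index = try doc.positionToIndex(types.Position{ .line = 1, .character = 6 });
        std.debug.assert(index == 19);
        std.debug.assert(doc.text[index] == 'b');
    }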