Merge branch 'zigtools:master' into master
commit 5804454334

.github/ISSUE_TEMPLATE/bug.yml (vendored, new file, 48 lines)
@@ -0,0 +1,48 @@
+name: Bug Report
+description: File a bug report
+labels: ["bug"]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        A bug is when something works differently than it is expected to.
+
+        ## Remember to search before filing a new report
+        Please search for this bug in the issue tracker, and use a bug report title that
+        would have made your bug report turn up in the search results for your search query.
+  - type: input
+    id: zig-version
+    attributes:
+      label: Zig Version
+      description: "The output of `zig version`"
+      placeholder: "0.9.0-dev.1275+ac52e0056"
+    validations:
+      required: true
+  - type: input
+    id: zls-version
+    attributes:
+      label: Zig Language Server Version
+      description: "Your zls release or commit"
+      placeholder: "b21039d51261923c665d3bc58fadc4b4d5e221ea"
+    validations:
+      required: true
+  - type: textarea
+    id: repro
+    attributes:
+      label: Steps to Reproduce
+      description: What exactly can someone else do, in order to observe the problem that you observed?
+    validations:
+      required: true
+  - type: textarea
+    id: expected
+    attributes:
+      label: Expected Behavior
+      description: What did you expect to happen?
+    validations:
+      required: true
+  - type: textarea
+    id: actual
+    attributes:
+      label: Actual Behavior
+      description: What happened instead? Be sure to include all error messages if any.
+    validations:
+      required: true
.github/ISSUE_TEMPLATE/enhancement.md (vendored, new file, 8 lines)

@@ -0,0 +1,8 @@
+---
+name: Improvement
+about: Enhance the zls experience
+labels: enhancement
+
+---
+
+## Remember to search before filing a new report
README.md (12 changed lines)

@@ -19,7 +19,7 @@ Zig Language Server, or `zls`, is a language server for Zig. The Zig wiki states
 - [Updating Data Files](#updating-data-files)
 - [Configuration Options](#configuration-options)
 - [Features](#features)
-- [VS Code](#vscode)
+- [VS Code](#vs-code)
 - [Sublime Text](#sublime-text)
 - [Sublime Text 3](#sublime-text-3)
 - [Sublime Text 4](#sublime-text-4)

@@ -70,7 +70,7 @@ Building `zls` is very easy. You will need [a build of Zig master](https://zigla
 git clone --recurse-submodules https://github.com/zigtools/zls
 cd zls
 zig build -Drelease-safe
-./zig-out/bin/zls config # Configure ZLS
+./zig-out/bin/zls --config # Configure ZLS
 ```

 *For detailed building instructions, see the Wiki page about [Cloning With Git](https://github.com/zigtools/zls/wiki/Downloading-and-Building-ZLS#cloning-with-git).*

@@ -92,7 +92,7 @@ There is also a `generate-data.js` in the `src/data` folder, you'll need to run

 ### Configuration Options

-You can configure zls by running `zls config` or manually creating your own `zls.json` configuration file.
+You can configure zls by running `zls --config` or manually creating your own `zls.json` configuration file.
 zls will look for a zls.json configuration file in multiple locations with the following priority:
 - In the local configuration folder of your OS (as provided by [known-folders](https://github.com/ziglibs/known-folders/blob/master/RESOURCES.md#folder-list))
 - In the global configuration folder of your OS (as provided by [known-folders](https://github.com/ziglibs/known-folders/blob/master/RESOURCES.md#folder-list))

@@ -108,7 +108,7 @@ The following options are currently available.
 | `zig_exe_path` | `?[]const u8` | `null` | zig executable path, e.g. `/path/to/zig/zig`, used to run the custom build runner. If `null`, zig is looked up in `PATH`. Will be used to infer the zig standard library path if none is provided. |
 | `warn_style` | `bool` | `false` | Enables warnings for style *guideline* mismatches |
 | `build_runner_path` | `?[]const u8` | `null` | Path to the build_runner.zig file provided by zls. `null` is equivalent to `${executable_directory}/build_runner.zig` |
-| `build_runner_cache_path` | `?[]const u8` | `null` | Path to a directory that will be used as zig's cache when running `zig run build_runner.zig ...`. `null` is equivalent to `${KnownFolders.Cache}/zls` |
+| `global_cache_path` | `?[]const u8` | `null` | Path to a directory that will be used as zig's cache. `null` is equivalent to `${KnownFolders.Cache}/zls` |
 | `enable_semantic_tokens` | `bool` | `true` | Enables semantic token support when the client also supports it. |
 | `enable_inlay_hints` | `bool` | `false` | Enables inlay hint support when the client also supports it. |
 | `operator_completions` | `bool` | `true` | Enables `*` and `?` operators in completion lists. |

@@ -118,8 +118,8 @@ The following options are currently available.

 ## Features

-`zls` supports most language features, including simple type function support, using namespace, payload capture type resolution, custom packages and others.
-Notable language features that are not currently implemented include `@cImport` as well as most forms of compile time evaluation.
+`zls` supports most language features, including simple type function support, using namespace, payload capture type resolution, custom packages, `cImport` and others.
+Currently there is no support for compile time evaluation.

 The following LSP features are supported:
 - Completions
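For readers updating their configuration after the `build_runner_cache_path` to `global_cache_path` rename above, a minimal `zls.json` might look like the following sketch. The paths are hypothetical placeholders, and every field shown is optional:

```json
{
    "zig_exe_path": "/path/to/zig/zig",
    "warn_style": false,
    "global_cache_path": "/home/user/.cache/zls",
    "enable_semantic_tokens": true,
    "enable_inlay_hints": false,
    "operator_completions": true
}
```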
build.zig

@@ -7,6 +7,7 @@ pub fn build(b: *std.build.Builder) !void {

     const mode = b.standardReleaseOptions();
     const exe = b.addExecutable("zls", "src/main.zig");
+    exe.use_stage1 = true;
     const exe_options = b.addOptions();
     exe.addOptions("build_options", exe_options);

@@ -76,11 +77,13 @@ pub fn build(b: *std.build.Builder) !void {
     test_step.dependOn(b.getInstallStep());

     var unit_tests = b.addTest("src/unit_tests.zig");
+    unit_tests.use_stage1 = true;
     unit_tests.setBuildMode(.Debug);
     unit_tests.setTarget(target);
     test_step.dependOn(&unit_tests.step);

     var session_tests = b.addTest("tests/sessions.zig");
+    session_tests.use_stage1 = true;
     session_tests.addPackage(.{ .name = "header", .source = .{ .path = "src/header.zig" } });
     session_tests.addPackage(.{ .name = "server", .source = .{ .path = "src/Server.zig" }, .dependencies = exe.packages.items });
     session_tests.setBuildMode(.Debug);
src/Config.zig

@@ -32,8 +32,8 @@ warn_style: bool = false,
 /// Path to the build_runner.zig file.
 build_runner_path: ?[]const u8 = null,

-/// Path to a directory that will be used as cache when `zig run`ning the build runner
-build_runner_cache_path: ?[]const u8 = null,
+/// Path to the global cache directory
+global_cache_path: ?[]const u8 = null,

 /// Semantic token support
 enable_semantic_tokens: bool = true,

@@ -208,7 +208,7 @@ pub fn configChanged(config: *Config, allocator: std.mem.Allocator, builtin_crea
         break :blk try std.fs.path.resolve(allocator, &[_][]const u8{ exe_dir_path, "build_runner.zig" });
     };

-    config.build_runner_cache_path = if (config.build_runner_cache_path) |p|
+    config.global_cache_path = if (config.global_cache_path) |p|
         try allocator.dupe(u8, p)
     else blk: {
         const cache_dir_path = (try known_folders.getPath(allocator, .cache)) orelse {
src/DocumentStore.zig

@@ -6,21 +6,22 @@ const offsets = @import("offsets.zig");
 const log = std.log.scoped(.doc_store);
 const Ast = std.zig.Ast;
 const BuildAssociatedConfig = @import("BuildAssociatedConfig.zig");
+const BuildConfig = @import("special/build_runner.zig").BuildConfig;
 const tracy = @import("tracy.zig");
 const Config = @import("Config.zig");
+const translate_c = @import("translate_c.zig");

 const DocumentStore = @This();

-const BuildFile = struct {
-    const Pkg = struct {
-        name: []const u8,
-        uri: []const u8,
-    };
+pub const Hasher = std.crypto.auth.siphash.SipHash128(1, 3);

+/// Initial state, that can be copied.
+pub const hasher_init: Hasher = Hasher.init(&[_]u8{0} ** Hasher.key_length);
+
+const BuildFile = struct {
     refs: usize,
     uri: []const u8,
-    packages: std.ArrayListUnmanaged(Pkg),
+    config: BuildFileConfig,

     builtin_uri: ?[]const u8 = null,

     pub fn destroy(self: *BuildFile, allocator: std.mem.Allocator) void {
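The `Hasher` and `hasser_init`-style declarations added above are the basis for the cImport caching introduced later in this diff. A standalone sketch of the same pattern follows; the `fingerprint` helper name is ours for illustration, not from the diff:

```zig
const std = @import("std");

// SipHash128 with a fixed all-zero key: deterministic across runs, which is
// exactly what a content fingerprint used as a cache key needs.
const Hasher = std.crypto.auth.siphash.SipHash128(1, 3);
const hasher_init: Hasher = Hasher.init(&[_]u8{0} ** Hasher.key_length);

/// Hypothetical helper: fingerprint a source buffer the same way the
/// DocumentStore fingerprints generated C source later in this diff.
fn fingerprint(source: []const u8) [Hasher.mac_length]u8 {
    var hasher = hasher_init; // copy of the reusable initial state
    hasher.update(source);
    var hash: [Hasher.mac_length]u8 = undefined;
    hasher.final(&hash);
    return hash;
}
```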
@@ -29,12 +30,39 @@ const BuildFile = struct {
     }
 };

+pub const BuildFileConfig = struct {
+    packages: []Pkg,
+    include_dirs: []IncludeDir,
+
+    pub fn deinit(self: BuildFileConfig, allocator: std.mem.Allocator) void {
+        for (self.packages) |pkg| {
+            allocator.free(pkg.name);
+            allocator.free(pkg.uri);
+        }
+        allocator.free(self.packages);
+
+        for (self.include_dirs) |dir| {
+            allocator.free(dir.path);
+        }
+        allocator.free(self.include_dirs);
+    }
+
+    pub const Pkg = struct {
+        name: []const u8,
+        uri: []const u8,
+    };
+
+    pub const IncludeDir = BuildConfig.IncludeDir;
+};
+
 pub const Handle = struct {
     document: types.TextDocument,
     count: usize,
     /// Contains one entry for every import in the document
     import_uris: []const []const u8,
-    /// Items in this array list come from `import_uris`
+    /// Contains one entry for every cimport in the document
+    cimports: []CImportHandle,
+    /// Items in this array list come from `import_uris` and `cimports`
     imports_used: std.ArrayListUnmanaged([]const u8),
     tree: Ast,
     document_scope: analysis.DocumentScope,
@@ -110,25 +138,25 @@ fn loadBuildAssociatedConfiguration(allocator: std.mem.Allocator, build_file: *B
     }
 }

-const LoadPackagesContext = struct {
+const LoadBuildConfigContext = struct {
     build_file: *BuildFile,
     allocator: std.mem.Allocator,
     build_runner_path: []const u8,
-    build_runner_cache_path: []const u8,
+    global_cache_path: []const u8,
     zig_exe_path: []const u8,
     build_file_path: ?[]const u8 = null,
     cache_root: []const u8,
     global_cache_root: []const u8,
 };

-fn loadPackages(context: LoadPackagesContext) !void {
+fn loadBuildConfiguration(context: LoadBuildConfigContext) !void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();

     const allocator = context.allocator;
     const build_file = context.build_file;
     const build_runner_path = context.build_runner_path;
-    const build_runner_cache_path = context.build_runner_cache_path;
+    const global_cache_path = context.global_cache_path;
     const zig_exe_path = context.zig_exe_path;

     const build_file_path = context.build_file_path orelse try URI.parse(allocator, build_file.uri);
@@ -140,7 +168,7 @@ fn loadPackages(context: LoadPackagesContext) !void {
         "run",
         build_runner_path,
         "--cache-dir",
-        build_runner_cache_path,
+        global_cache_path,
         "--pkg-begin",
         "@build@",
         build_file_path,
@@ -167,46 +195,67 @@ fn loadPackages(context: LoadPackagesContext) !void {
         defer allocator.free(joined);

         log.err(
-            "Failed to execute build runner to collect packages, command:\n{s}\nError: {s}",
+            "Failed to execute build runner to collect build configuration, command:\n{s}\nError: {s}",
             .{ joined, zig_run_result.stderr },
         );
     }

     switch (zig_run_result.term) {
         .Exited => |exit_code| {
-            if (exit_code == 0) {
-                log.debug("Finished zig run for build file {s}", .{build_file.uri});
-
-                for (build_file.packages.items) |old_pkg| {
-                    allocator.free(old_pkg.name);
-                    allocator.free(old_pkg.uri);
-                }
-
-                build_file.packages.shrinkAndFree(allocator, 0);
-                var line_it = std.mem.split(u8, zig_run_result.stdout, "\n");
-                while (line_it.next()) |line| {
-                    if (std.mem.indexOfScalar(u8, line, '\x00')) |zero_byte_idx| {
-                        const name = line[0..zero_byte_idx];
-                        const rel_path = line[zero_byte_idx + 1 ..];
-
-                        const pkg_abs_path = try std.fs.path.resolve(allocator, &[_][]const u8{ directory_path, rel_path });
-                        defer allocator.free(pkg_abs_path);
-
-                        const pkg_uri = try URI.fromPath(allocator, pkg_abs_path);
-                        errdefer allocator.free(pkg_uri);
-
-                        const duped_name = try allocator.dupe(u8, name);
-                        errdefer allocator.free(duped_name);
-
-                        (try build_file.packages.addOne(allocator)).* = .{
-                            .name = duped_name,
-                            .uri = pkg_uri,
-                        };
-                    }
-                }
-            } else {
-                return error.RunFailed;
-            }
+            if (exit_code != 0) return error.RunFailed;
+
+            const parse_options = std.json.ParseOptions{ .allocator = allocator };
+
+            build_file.config.deinit(allocator);
+
+            const config: BuildConfig = std.json.parse(
+                BuildConfig,
+                &std.json.TokenStream.init(zig_run_result.stdout),
+                parse_options,
+            ) catch return error.RunFailed;
+            defer std.json.parseFree(BuildConfig, config, parse_options);
+
+            var packages = try std.ArrayListUnmanaged(BuildFileConfig.Pkg).initCapacity(allocator, config.packages.len);
+            errdefer {
+                for (packages.items) |pkg| {
+                    allocator.free(pkg.name);
+                    allocator.free(pkg.uri);
+                }
+                packages.deinit(allocator);
+            }
+
+            var include_dirs = try std.ArrayListUnmanaged(BuildFileConfig.IncludeDir).initCapacity(allocator, config.include_dirs.len);
+            errdefer {
+                for (include_dirs.items) |dir| {
+                    allocator.free(dir.path);
+                }
+                include_dirs.deinit(allocator);
+            }
+
+            for (config.packages) |pkg| {
+                const pkg_abs_path = try std.fs.path.resolve(allocator, &[_][]const u8{ directory_path, pkg.path });
+                defer allocator.free(pkg_abs_path);
+
+                const uri = try URI.fromPath(allocator, pkg_abs_path);
+                errdefer allocator.free(uri);
+
+                const name = try allocator.dupe(u8, pkg.name);
+                errdefer allocator.free(name);
+
+                packages.appendAssumeCapacity(.{ .name = name, .uri = uri });
+            }
+
+            for (config.include_dirs) |dir| {
+                const path = try allocator.dupe(u8, dir.path);
+                errdefer allocator.free(path);
+
+                include_dirs.appendAssumeCapacity(.{ .path = path, .system = dir.system });
+            }
+
+            build_file.config = .{
+                .packages = packages.toOwnedSlice(allocator),
+                .include_dirs = include_dirs.toOwnedSlice(allocator),
+            };
         },
         else => return error.RunFailed,
     }
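The build runner's stdout is now parsed as JSON into a `BuildConfig` value. The diff does not show `special/build_runner.zig` itself, but judging from the fields the parser accesses above (`pkg.name`, `pkg.path`, `dir.path`, `dir.system`), the declaration is presumably shaped roughly like this sketch:

```zig
// Hypothetical reconstruction of the BuildConfig shape implied by the parsing
// code above; the authoritative declaration lives in special/build_runner.zig.
pub const BuildConfig = struct {
    packages: []Pkg,
    include_dirs: []IncludeDir,

    pub const Pkg = struct {
        name: []const u8,
        path: []const u8, // resolved against the build file's directory above
    };

    pub const IncludeDir = struct {
        path: []const u8,
        system: bool, // presumably distinguishes -isystem style directories
    };
};
```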
@@ -232,6 +281,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
     handle.* = Handle{
         .count = 1,
         .import_uris = &.{},
+        .cimports = &.{},
         .imports_used = .{},
         .document = .{
             .uri = uri,
@@ -256,7 +306,10 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
         build_file.* = .{
             .refs = 1,
             .uri = try self.allocator.dupe(u8, uri),
-            .packages = .{},
+            .config = .{
+                .packages = &.{},
+                .include_dirs = &.{},
+            },
         };

         const build_file_path = try URI.parse(self.allocator, build_file.uri);
@@ -274,11 +327,11 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha

         // TODO: Do this in a separate thread?
         // It can take quite long.
-        loadPackages(.{
+        loadBuildConfiguration(.{
             .build_file = build_file,
             .allocator = self.allocator,
             .build_runner_path = self.config.build_runner_path.?,
-            .build_runner_cache_path = self.config.build_runner_cache_path.?,
+            .global_cache_path = self.config.global_cache_path.?,
             .zig_exe_path = self.config.zig_exe_path.?,
             .build_file_path = build_file_path,
             .cache_root = self.zig_cache_root,
@@ -382,6 +435,14 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
         self.allocator.free(handle.import_uris);
     }

+    handle.cimports = try self.collectCIncludes(handle);
+    errdefer {
+        for (handle.cimports) |item| {
+            self.allocator.free(item.uri);
+        }
+        self.allocator.free(handle.cimports);
+    }
+
     try self.handles.putNoClobber(self.allocator, uri, handle);
     return handle;
 }
@@ -409,11 +470,8 @@ fn decrementBuildFileRefs(self: *DocumentStore, build_file: *BuildFile) void {
     build_file.refs -= 1;
     if (build_file.refs == 0) {
         log.debug("Freeing build file {s}", .{build_file.uri});
-        for (build_file.packages.items) |pkg| {
-            self.allocator.free(pkg.name);
-            self.allocator.free(pkg.uri);
-        }
-        build_file.packages.deinit(self.allocator);
+
+        build_file.config.deinit(self.allocator);

         // Decrement count of the document since one count comes
         // from the build file existing.
@@ -456,9 +514,14 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
             self.allocator.free(import_uri);
         }

+        for (handle.cimports) |item| {
+            self.allocator.free(item.uri);
+        }
+
         handle.document_scope.deinit(self.allocator);
         handle.imports_used.deinit(self.allocator);
         self.allocator.free(handle.import_uris);
+        self.allocator.free(handle.cimports);
         self.allocator.destroy(handle);
         const uri_key = entry.key_ptr.*;
         std.debug.assert(self.handles.remove(uri));
@@ -475,27 +538,149 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
 }

 fn collectImportUris(self: *DocumentStore, handle: *Handle) ![]const []const u8 {
-    var new_imports = std.ArrayList([]const u8).init(self.allocator);
+    const collected_imports = try analysis.collectImports(self.allocator, handle.tree);
+
+    var imports = std.ArrayList([]const u8).fromOwnedSlice(self.allocator, collected_imports);
     errdefer {
-        for (new_imports.items) |imp| {
+        for (imports.items) |imp| {
             self.allocator.free(imp);
         }
-        new_imports.deinit();
+        imports.deinit();
     }
-    try analysis.collectImports(&new_imports, handle.tree);

     // Convert to URIs
     var i: usize = 0;
-    while (i < new_imports.items.len) {
-        if (try self.uriFromImportStr(self.allocator, handle.*, new_imports.items[i])) |uri| {
+    while (i < imports.items.len) {
+        if (try self.uriFromImportStr(self.allocator, handle.*, imports.items[i])) |uri| {
             // The raw import strings are owned by the document and do not need to be freed here.
-            new_imports.items[i] = uri;
+            imports.items[i] = uri;
             i += 1;
         } else {
-            _ = new_imports.swapRemove(i);
+            _ = imports.swapRemove(i);
         }
     }
-    return new_imports.toOwnedSlice();
+    return imports.toOwnedSlice();
+}
+
+pub const CImportSource = struct {
+    /// the `@cInclude` node
+    node: Ast.Node.Index,
+    /// hash of c source file
+    hash: [Hasher.mac_length]u8,
+    /// c source file
+    source: []const u8,
+};
+
+/// Collects all `@cImport` nodes and converts them into c source code
+/// the translation process is defined in `translate_c.convertCInclude`
+/// Caller owns returned memory.
+fn collectCIncludeSources(self: *DocumentStore, handle: *Handle) ![]CImportSource {
+    var cimport_nodes = try analysis.collectCImportNodes(self.allocator, handle.tree);
+    defer self.allocator.free(cimport_nodes);
+
+    var sources = try std.ArrayListUnmanaged(CImportSource).initCapacity(self.allocator, cimport_nodes.len);
+    errdefer {
+        for (sources.items) |item| {
+            self.allocator.free(item.source);
+        }
+        sources.deinit(self.allocator);
+    }
+
+    for (cimport_nodes) |node| {
+        const c_source = translate_c.convertCInclude(self.allocator, handle.tree, node) catch |err| switch (err) {
+            error.Unsupported => continue,
+            error.OutOfMemory => return error.OutOfMemory,
+        };
+
+        var hasher = hasher_init;
+        hasher.update(c_source);
+        var hash: [Hasher.mac_length]u8 = undefined;
+        hasher.final(&hash);
+
+        sources.appendAssumeCapacity(.{
+            .node = node,
+            .hash = hash,
+            .source = c_source,
+        });
+    }
+
+    return sources.toOwnedSlice(self.allocator);
+}
+
+pub const CImportHandle = struct {
+    /// the `@cInclude` node
+    node: Ast.Node.Index,
+    /// hash of the c source file
+    hash: [Hasher.mac_length]u8,
+    /// uri to a zig source file generated with translate-c
+    uri: []const u8,
+};
+
+/// Collects all `@cImport` nodes and converts them into zig files using translate-c
+/// Caller owns returned memory.
+fn collectCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHandle {
+    var cimport_nodes = try analysis.collectCImportNodes(self.allocator, handle.tree);
+    defer self.allocator.free(cimport_nodes);
+
+    var uris = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, cimport_nodes.len);
+    errdefer {
+        for (uris.items) |item| {
+            self.allocator.free(item.uri);
+        }
+        uris.deinit(self.allocator);
+    }
+
+    for (cimport_nodes) |node| {
+        const c_source = translate_c.convertCInclude(self.allocator, handle.tree, node) catch |err| switch (err) {
+            error.Unsupported => continue,
+            error.OutOfMemory => return error.OutOfMemory,
+        };
+        defer self.allocator.free(c_source);
+
+        const uri = self.translate(handle, c_source) catch |err| {
+            std.log.warn("failed to translate cInclude: {}", .{err});
+            continue;
+        } orelse continue;
+        errdefer self.allocator.free(uri);
+
+        var hasher = hasher_init;
+        hasher.update(c_source);
+        var hash: [Hasher.mac_length]u8 = undefined;
+        hasher.final(&hash);
+
+        uris.appendAssumeCapacity(.{
+            .node = node,
+            .hash = hash,
+            .uri = uri,
+        });
+    }
+
+    return uris.toOwnedSlice(self.allocator);
+}
+
+fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) !?[]const u8 {
+    const dirs: []BuildConfig.IncludeDir = if (handle.associated_build_file) |build_file| build_file.config.include_dirs else &.{};
+    const include_dirs = blk: {
+        var result = try self.allocator.alloc([]const u8, dirs.len);
+        errdefer self.allocator.free(result);
+
+        for (dirs) |dir, i| {
+            result[i] = dir.path;
+        }
+
+        break :blk result;
+    };
+    defer self.allocator.free(include_dirs);
+
+    const file_path = (try translate_c.translate(
+        self.allocator,
+        self.config,
+        include_dirs,
+        source,
+    )) orelse return null;
+    defer self.allocator.free(file_path);
+
+    return try URI.fromPath(self.allocator, file_path);
 }

 fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
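For orientation, this is the kind of user code the new pipeline serves (illustrative only, not from the diff): each `@cImport` below is collected by `analysis.collectCImportNodes`, rendered to C source by `translate_c.convertCInclude`, hashed, run through translate-c, and the resulting Zig file is then indexed like any other import.

```zig
// Illustrative user code; building it requires linking libc
// (e.g. `zig build-exe example.zig -lc`).
const c = @cImport(@cInclude("stdio.h"));

pub fn main() void {
    // Completions and goto-definition on `c.printf` work by resolving the
    // translated file behind the @cImport node (see resolveCImport later
    // in this diff).
    _ = c.printf("hello from translate-c\n");
}
```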
@@ -506,28 +691,37 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
     handle.document_scope.deinit(self.allocator);
     handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree);

-    const new_imports = try self.collectImportUris(handle);
-    errdefer {
-        for (new_imports) |imp| {
-            self.allocator.free(imp);
-        }
-        self.allocator.free(new_imports);
-    }
+    var old_imports = handle.import_uris;
+    var old_cimports = handle.cimports;
+
+    handle.import_uris = try self.collectImportUris(handle);
+
+    handle.cimports = try self.refreshDocumentCIncludes(handle);

-    const old_imports = handle.import_uris;
-    handle.import_uris = new_imports;
     defer {
         for (old_imports) |uri| {
             self.allocator.free(uri);
         }
         self.allocator.free(old_imports);
+
+        for (old_cimports) |old_cimport| {
+            self.allocator.free(old_cimport.uri);
+        }
+        self.allocator.free(old_cimports);
     }

     var i: usize = 0;
     while (i < handle.imports_used.items.len) {
         const old = handle.imports_used.items[i];
         still_exists: {
-            for (new_imports) |new| {
+            for (handle.import_uris) |new| {
+                if (std.mem.eql(u8, new, old)) {
+                    handle.imports_used.items[i] = new;
+                    break :still_exists;
+                }
+            }
+            for (handle.cimports) |cimport| {
+                const new = cimport.uri;
                 if (std.mem.eql(u8, new, old)) {
                     handle.imports_used.items[i] = new;
                     break :still_exists;
@@ -542,18 +736,104 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
         }
     }
 }

+fn refreshDocumentCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHandle {
+    const new_sources: []CImportSource = try self.collectCIncludeSources(handle);
+    defer {
+        for (new_sources) |new_source| {
+            self.allocator.free(new_source.source);
+        }
+        self.allocator.free(new_sources);
+    }
+
+    var old_cimports = handle.cimports;
+    var new_cimports = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, new_sources.len);
+    errdefer {
+        for (new_cimports.items) |new_cimport| {
+            self.allocator.free(new_cimport.uri);
+        }
+        new_cimports.deinit(self.allocator);
+    }
+
+    for (new_sources) |new_source| {
+        const maybe_old_cimport: ?CImportHandle = blk: {
+            const old_cimport: CImportHandle = found: {
+                for (old_cimports) |old_cimport| {
+                    if (new_source.node == old_cimport.node) {
+                        break :found old_cimport;
+                    }
+                }
+                break :blk null;
+            };
+
+            // avoid re-translating if the source didn't change
+            if (std.mem.eql(u8, &new_source.hash, &old_cimport.hash)) {
+                break :blk CImportHandle{
+                    .node = old_cimport.node,
+                    .hash = old_cimport.hash,
+                    .uri = try self.allocator.dupe(u8, old_cimport.uri),
+                };
+            }
+
+            const new_uri = self.translate(handle, new_source.source) catch |err| {
+                std.log.warn("failed to translate cInclude: {}", .{err});
+                continue;
+            } orelse continue;
+            errdefer self.allocator.free(new_uri);
+
+            break :blk CImportHandle{
+                .node = old_cimport.node,
+                .hash = old_cimport.hash,
+                .uri = new_uri,
+            };
+        };
+
+        if (maybe_old_cimport) |cimport| {
+            new_cimports.appendAssumeCapacity(cimport);
+            continue;
+        }
+
+        const c_source = translate_c.convertCInclude(self.allocator, handle.tree, new_source.node) catch |err| switch (err) {
+            error.Unsupported => continue,
+            error.OutOfMemory => return error.OutOfMemory,
+        };
+        defer self.allocator.free(c_source);
+
+        var hasher = hasher_init;
+        var hash: [Hasher.mac_length]u8 = undefined;
+        hasher.update(c_source);
+        hasher.final(&hash);
+
+        const new_uri = self.translate(
+            handle,
+            c_source,
+        ) catch |err| {
+            std.log.warn("failed to translate cInclude: {}", .{err});
+            continue;
+        } orelse continue;
+        errdefer self.allocator.free(new_uri);
+
+        new_cimports.appendAssumeCapacity(.{
+            .node = new_source.node,
+            .hash = hash,
+            .uri = new_uri,
+        });
+    }
+
+    return new_cimports.toOwnedSlice(self.allocator);
+}
+
 pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
     if (handle.is_build_file) |build_file| {
-        loadPackages(.{
+        loadBuildConfiguration(.{
             .build_file = build_file,
             .allocator = self.allocator,
             .build_runner_path = self.config.build_runner_path.?,
-            .build_runner_cache_path = self.config.build_runner_cache_path.?,
+            .global_cache_path = self.config.global_cache_path.?,
             .zig_exe_path = self.config.zig_exe_path.?,
             .cache_root = self.zig_cache_root,
             .global_cache_root = self.zig_global_cache_root,
         }) catch |err| {
-            log.err("Failed to load packages of build file {s} (error: {})", .{ build_file.uri, err });
+            log.err("Failed to load build configuration for {s} (error: {})", .{ build_file.uri, err });
         };
     }
 }
@@ -643,7 +923,7 @@ pub fn uriFromImportStr(self: *DocumentStore, allocator: std.mem.Allocator, hand
         return null;
     } else if (!std.mem.endsWith(u8, import_str, ".zig")) {
         if (handle.associated_build_file) |build_file| {
-            for (build_file.packages.items) |pkg| {
+            for (build_file.config.packages) |pkg| {
                 if (std.mem.eql(u8, import_str, pkg.name)) {
                     return try allocator.dupe(u8, pkg.uri);
                 }
@@ -686,7 +966,7 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
         }
     }
     if (handle.associated_build_file) |bf| {
-        for (bf.packages.items) |pkg| {
+        for (bf.config.packages) |pkg| {
             if (std.mem.eql(u8, pkg.uri, final_uri)) {
                 break :find_uri pkg.uri;
             }
@@ -706,18 +986,52 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
     }

     // New document, read the file then call into openDocument.
-    const file_path = try URI.parse(allocator, final_uri);
-    defer allocator.free(file_path);
+    var document_handle = try self.newDocumentFromUri(final_uri);

-    var file = std.fs.cwd().openFile(file_path, .{}) catch {
-        log.debug("Cannot open import file {s}", .{file_path});
+    // Add to import table of current handle.
+    try handle.imports_used.append(allocator, handle_uri);
+
+    return document_handle;
+}
+
+pub fn resolveCImport(self: *DocumentStore, handle: *Handle, node: Ast.Node.Index) !?*Handle {
+    const uri = blk: {
+        for (handle.cimports) |item| {
+            if (item.node == node) break :blk item.uri;
+        }
         return null;
     };
+
+    // Check if the import is already opened by others.
+    if (self.getHandle(uri)) |new_handle| {
+        // If it is, append it to our imports, increment the count, set our new handle
+        // and return the parsed tree root node.
+        try handle.imports_used.append(self.allocator, uri);
+        new_handle.count += 1;
+        return new_handle;
+    }
+
+    // New document, read the file then call into openDocument.
+    var document_handle = try self.newDocumentFromUri(uri);
+
+    // Add to cimport table of current handle.
+    try handle.imports_used.append(self.allocator, uri);
+
+    return document_handle;
+}
+
+fn newDocumentFromUri(self: *DocumentStore, uri: []const u8) !?*Handle {
+    const file_path = try URI.parse(self.allocator, uri);
+    defer self.allocator.free(file_path);
+
+    var file = std.fs.openFileAbsolute(file_path, .{}) catch |err| {
+        log.debug("Cannot open file '{s}': {}", .{ file_path, err });
+        return null;
+    };
     defer file.close();
-    {
-        const file_contents = file.readToEndAllocOptions(
-            allocator,
+    const file_contents = file.readToEndAllocOptions(
+        self.allocator,
         std.math.maxInt(usize),
         null,
         @alignOf(u8),
@@ -729,26 +1043,24 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
             return null;
         },
     };
-    errdefer allocator.free(file_contents);
+    errdefer self.allocator.free(file_contents);

-    // Add to import table of current handle.
-    try handle.imports_used.append(self.allocator, handle_uri);
-    // Swap handles.
-    // This takes ownership of the passed uri and text.
-    const duped_final_uri = try allocator.dupe(u8, final_uri);
-    errdefer allocator.free(duped_final_uri);
-    return try self.newDocument(duped_final_uri, file_contents);
-    }
+    return try self.newDocument(try self.allocator.dupe(u8, uri), file_contents);
 }

 fn stdUriFromLibPath(allocator: std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 {
     if (zig_lib_path) |zpath| {
         const std_path = std.fs.path.resolve(allocator, &[_][]const u8{
             zpath, "./std/std.zig",
-        }) catch |err| {
-            log.debug("Failed to resolve zig std library path, error: {}", .{err});
+        }) catch |first_std_err| blk: {
+            // workaround for https://github.com/ziglang/zig/issues/12516
+            break :blk std.fs.path.resolve(allocator, &[_][]const u8{
+                zpath, "./zig/std/std.zig",
+            }) catch {
+                log.debug("Failed to resolve zig std library path, error: {}", .{first_std_err});
                 return null;
             };
+        };

         defer allocator.free(std_path);
         // Get the std_path as a URI, so we can just append to it!
@@ -768,6 +1080,10 @@ pub fn deinit(self: *DocumentStore) void {
             self.allocator.free(uri);
         }
         self.allocator.free(entry.value_ptr.*.import_uris);
+        for (entry.value_ptr.*.cimports) |cimport| {
+            self.allocator.free(cimport.uri);
+        }
+        self.allocator.free(entry.value_ptr.*.cimports);
         entry.value_ptr.*.imports_used.deinit(self.allocator);
         self.allocator.free(entry.key_ptr.*);
         self.allocator.destroy(entry.value_ptr.*);
@@ -775,11 +1091,7 @@ pub fn deinit(self: *DocumentStore) void {

     self.handles.deinit(self.allocator);
     for (self.build_files.items) |build_file| {
-        for (build_file.packages.items) |pkg| {
-            self.allocator.free(pkg.name);
-            self.allocator.free(pkg.uri);
-        }
-        build_file.packages.deinit(self.allocator);
+        build_file.config.deinit(self.allocator);
         self.allocator.free(build_file.uri);
         build_file.destroy(self.allocator);
     }
@@ -808,6 +1120,7 @@ fn tagStoreCompletionItems(self: DocumentStore, arena: *std.heap.ArenaAllocator,
             result_set.putAssumeCapacity(completion, {});
         }
     }
+
     return result_set.entries.items(.key);
 }
src/Server.zig

@@ -240,11 +240,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
     scopes: for (handle.document_scope.scopes) |scope| {
         const scope_data = switch (scope.data) {
             .function => |f| b: {
-                var buf: [1]std.zig.Ast.Node.Index = undefined;
-                var proto = ast.fnProto(tree, f, &buf) orelse break :b f;
-                if (proto.extern_export_inline_token) |tok| {
-                    if (std.mem.eql(u8, tree.tokenSlice(tok), "extern")) continue :scopes;
-                }
+                if (!ast.fnProtoHasBody(tree, f).?) continue :scopes;
                 break :b f;
             },
             .block => |b| b,
@@ -255,7 +251,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
     while (decl_iterator.next()) |decl| {
         var identifier_count: usize = 0;

-        var name_token_index = switch (decl.value_ptr.*) {
+        const name_token_index = switch (decl.value_ptr.*) {
             .ast_node => |an| s: {
                 const an_tag = tree.nodes.items(.tag)[an];
                 switch (an_tag) {
@@ -275,8 +271,27 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
         const pit_start = tree.firstToken(scope_data);
         const pit_end = ast.lastToken(tree, scope_data);

-        for (tree.tokens.items(.tag)[pit_start..pit_end]) |tag, index| {
-            if (tag == .identifier and std.mem.eql(u8, tree.tokenSlice(pit_start + @intCast(u32, index)), tree.tokenSlice(name_token_index))) identifier_count += 1;
+        const tags = tree.tokens.items(.tag)[pit_start..pit_end];
+        for (tags) |tag, index| {
+            if (tag != .identifier) continue;
+            if (!std.mem.eql(u8, tree.tokenSlice(pit_start + @intCast(u32, index)), tree.tokenSlice(name_token_index))) continue;
+            if (index -| 1 > 0 and tags[index - 1] == .period) continue;
+            if (index +| 2 < tags.len and tags[index + 1] == .colon) switch (tags[index + 2]) {
+                .l_brace,
+                .keyword_inline,
+                .keyword_while,
+                .keyword_for,
+                .keyword_switch,
+                => continue,
+                else => {},
+            };
+            if (index -| 2 > 0 and tags[index - 1] == .colon) switch (tags[index - 2]) {
+                .keyword_break,
+                .keyword_continue,
+                => continue,
+                else => {},
+            };
+            identifier_count += 1;
         }

         if (identifier_count <= 1)
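The extra token checks above exist because the counter matches raw identifier tokens: without them, labels and field names that merely share a declaration's name would be tallied as uses. A hypothetical case the new guards skip:

```zig
// Illustrative only: `blk` below is a label, and `break :blk` is a label use.
// If a declaration named `blk` existed in the same scope, the old counter
// would have counted these tokens as uses of it; the new checks on the
// surrounding `:`, `{`, `break`, and `continue` tokens (and on a preceding
// `.` for field accesses) skip them.
fn example() u32 {
    const value = blk: {
        break :blk 42;
    };
    return value;
}
```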
@@ -300,17 +315,8 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha

         if (!std.mem.eql(u8, call_name, "@import")) continue;

-        const node_data = tree.nodes.items(.data)[node];
-        const params = switch (tree.nodes.items(.tag)[node]) {
-            .builtin_call, .builtin_call_comma => tree.extra_data[node_data.lhs..node_data.rhs],
-            .builtin_call_two, .builtin_call_two_comma => if (node_data.lhs == 0)
-                &[_]Ast.Node.Index{}
-            else if (node_data.rhs == 0)
-                &[_]Ast.Node.Index{node_data.lhs}
-            else
-                &[_]Ast.Node.Index{ node_data.lhs, node_data.rhs },
-            else => unreachable,
-        };
+        var buffer: [2]Ast.Node.Index = undefined;
+        const params = ast.builtinCallParams(tree, node, &buffer).?;

         if (params.len != 1) continue;
@@ -1145,12 +1151,15 @@ fn referencesDefinitionGlobal(

     const result: types.ResponseParams = if (highlight) result: {
         var highlights = try std.ArrayList(types.DocumentHighlight).initCapacity(server.arena.allocator(), locs.items.len);
+        const uri = handle.uri();
         for (locs.items) |loc| {
-            highlights.appendAssumeCapacity(.{
-                .range = loc.range,
-                .kind = .Text,
-            });
+            if (std.mem.eql(u8, loc.uri, uri)) {
+                highlights.appendAssumeCapacity(.{
+                    .range = loc.range,
+                    .kind = .Text,
+                });
+            }
         }
         break :result .{ .DocumentHighlight = highlights.items };
     } else .{ .Locations = locs.items };
@@ -1188,12 +1197,15 @@ fn referencesDefinitionFieldAccess(
     );
     const result: types.ResponseParams = if (highlight) result: {
         var highlights = try std.ArrayList(types.DocumentHighlight).initCapacity(server.arena.allocator(), locs.items.len);
+        const uri = handle.uri();
         for (locs.items) |loc| {
-            highlights.appendAssumeCapacity(.{
-                .range = loc.range,
-                .kind = .Text,
-            });
+            if (std.mem.eql(u8, loc.uri, uri)) {
+                highlights.appendAssumeCapacity(.{
+                    .range = loc.range,
+                    .kind = .Text,
+                });
+            }
         }
         break :result .{ .DocumentHighlight = highlights.items };
     } else .{ .Locations = locs.items };
     try send(writer, server.arena.allocator(), types.Response{
@@ -1219,12 +1231,15 @@ fn referencesDefinitionLabel(
     try references.labelReferences(&server.arena, decl, server.offset_encoding, include_decl, &locs, std.ArrayList(types.Location).append);
     const result: types.ResponseParams = if (highlight) result: {
         var highlights = try std.ArrayList(types.DocumentHighlight).initCapacity(server.arena.allocator(), locs.items.len);
+        const uri = handle.uri();
         for (locs.items) |loc| {
-            highlights.appendAssumeCapacity(.{
-                .range = loc.range,
-                .kind = .Text,
-            });
+            if (std.mem.eql(u8, loc.uri, uri)) {
+                highlights.appendAssumeCapacity(.{
+                    .range = loc.range,
+                    .kind = .Text,
+                });
+            }
         }
         break :result .{ .DocumentHighlight = highlights.items };
     } else .{ .Locations = locs.items };
     try send(writer, server.arena.allocator(), types.Response{
@@ -1778,7 +1793,7 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req:
             },
             .textDocumentSync = .Full,
             .renameProvider = true,
-            .completionProvider = .{ .resolveProvider = false, .triggerCharacters = &[_][]const u8{ ".", ":", "@" }, .completionItem = .{ .labelDetailsSupport = true } },
+            .completionProvider = .{ .resolveProvider = false, .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]" }, .completionItem = .{ .labelDetailsSupport = true } },
             .documentHighlightProvider = true,
             .hoverProvider = true,
             .codeActionProvider = false,
@@ -2049,9 +2064,9 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req:

     if (!subpath_present and pos_context == .import_string_literal) {
         if (handle.associated_build_file) |bf| {
-            try fsl_completions.ensureUnusedCapacity(server.arena.allocator(), bf.packages.items.len);
+            try fsl_completions.ensureUnusedCapacity(server.arena.allocator(), bf.config.packages.len);

-            for (bf.packages.items) |pkg| {
+            for (bf.config.packages) |pkg| {
                 try fsl_completions.append(server.arena.allocator(), .{
                     .label = pkg.name,
                     .kind = .Module,
@@ -2480,7 +2495,6 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
         return;
     }

-    std.debug.assert(tree.root.Object.get("method") != null);
     const method = tree.root.Object.get("method").?.String;

     const start_time = std.time.milliTimestamp();
@@ -2520,6 +2534,7 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
     };

     // Hack to avoid `return`ing in the inline for, which causes bugs.
+    // TODO: Change once stage2 is shipped and more stable?
     var done: ?anyerror = null;
     inline for (method_map) |method_info| {
         if (done == null and std.mem.eql(u8, method, method_info[0])) {

src/analysis.zig (189 changed lines)
@@ -203,7 +203,7 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
         return true;
     }

-    if (isPtrType(tree, param.type_expr)) {
+    if (ast.isPtrType(tree, param.type_expr)) {
        if (try resolveTypeOfNode(document_store, arena, .{
            .node = token_data[param.type_expr].rhs,
            .handle = handle,
@@ -222,6 +222,9 @@ pub fn getVariableSignature(tree: Ast, var_decl: Ast.full.VarDecl) []const u8 {
 }

 pub fn getContainerFieldSignature(tree: Ast, field: Ast.full.ContainerField) []const u8 {
+    if (field.ast.value_expr == 0 and field.ast.type_expr == 0 and field.ast.align_expr == 0) {
+        return ""; // TODO display the container's type
+    }
     const start = offsets.tokenLocation(tree, field.ast.name_token).start;
     const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
     const end = offsets.tokenLocation(tree, ast.lastToken(tree, end_node)).end;
@@ -311,13 +314,6 @@ fn getDeclName(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
 };
 }
 
-fn isContainerDecl(decl_handle: DeclWithHandle) bool {
-return switch (decl_handle.decl.*) {
-.ast_node => |inner_node| ast.isContainer(decl_handle.handle.tree.nodes.items(.tag)[inner_node]),
-else => false,
-};
-}
-
 fn resolveVarDeclAliasInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, node_handle: NodeWithHandle, root: bool) error{OutOfMemory}!?DeclWithHandle {
 _ = root;
 const handle = node_handle.handle;
@@ -341,7 +337,8 @@ fn resolveVarDeclAliasInternal(store: *DocumentStore, arena: *std.heap.ArenaAllo
 const lhs = datas[node_handle.node].lhs;
 
 const container_node = if (ast.isBuiltinCall(tree, lhs)) block: {
-if (!std.mem.eql(u8, tree.tokenSlice(main_tokens[lhs]), "@import"))
+const name = tree.tokenSlice(main_tokens[lhs]);
+if (!std.mem.eql(u8, name, "@import") and !std.mem.eql(u8, name, "@cImport"))
 return null;
 
 const inner_node = (try resolveTypeOfNode(store, arena, .{ .node = lhs, .handle = handle })) orelse return null;
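The widened check above lets alias resolution follow `@cImport` as well as `@import`. A minimal sketch of a declaration that now resolves; the GLFW names are borrowed from the doc comment of the new src/translate_c.zig added later in this commit, not from this hunk:

```zig
// `c` is a container alias whose type resolves through @cImport,
// analogous to `const std = @import("std");`.
const c = @cImport({
    @cDefine("GLFW_INCLUDE_VULKAN", {}); // example define from the translate_c.zig doc comment
    @cInclude("GLFW/glfw3.h");
});
```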
@@ -394,39 +391,14 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato
 return null;
 }
 
-fn isBlock(tree: Ast, node: Ast.Node.Index) bool {
-return switch (tree.nodes.items(.tag)[node]) {
-.block,
-.block_semicolon,
-.block_two,
-.block_two_semicolon,
-=> true,
-else => false,
-};
-}
-
 fn findReturnStatementInternal(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.Node.Index, already_found: *bool) ?Ast.Node.Index {
 var result: ?Ast.Node.Index = null;
 
 const node_tags = tree.nodes.items(.tag);
 const datas = tree.nodes.items(.data);
 
-if (!isBlock(tree, body)) return null;
-
-const statements: []const Ast.Node.Index = switch (node_tags[body]) {
-.block, .block_semicolon => tree.extra_data[datas[body].lhs..datas[body].rhs],
-.block_two, .block_two_semicolon => blk: {
-const statements = &[_]Ast.Node.Index{ datas[body].lhs, datas[body].rhs };
-const len: usize = if (datas[body].lhs == 0)
-@as(usize, 0)
-else if (datas[body].rhs == 0)
-@as(usize, 1)
-else
-@as(usize, 2);
-break :blk statements[0..len];
-},
-else => unreachable,
-};
+var buffer: [2]Ast.Node.Index = undefined;
+const statements = ast.blockStatements(tree, body, &buffer) orelse return null;
 
 for (statements) |child_idx| {
 if (node_tags[child_idx] == .@"return") {
@@ -532,17 +504,6 @@ fn resolveUnwrapErrorType(store: *DocumentStore, arena: *std.heap.ArenaAllocator
 return null;
 }
 
-pub fn isPtrType(tree: Ast, node: Ast.Node.Index) bool {
-return switch (tree.nodes.items(.tag)[node]) {
-.ptr_type,
-.ptr_type_aligned,
-.ptr_type_bit_range,
-.ptr_type_sentinel,
-=> true,
-else => false,
-};
-}
-
 /// Resolves the child type of a deref type
 fn resolveDerefType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, deref: TypeWithHandle, bound_type_params: *BoundTypeParams) !?TypeWithHandle {
 const deref_node = switch (deref.type.data) {
@@ -560,7 +521,7 @@ fn resolveDerefType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, dere
 const main_token = tree.nodes.items(.main_token)[deref_node];
 const token_tag = tree.tokens.items(.tag)[main_token];
 
-if (isPtrType(tree, deref_node)) {
+if (ast.isPtrType(tree, deref_node)) {
 const ptr_type = ast.ptrType(tree, deref_node).?;
 switch (token_tag) {
 .asterisk => {
@@ -900,17 +861,8 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 .builtin_call_two,
 .builtin_call_two_comma,
 => {
-const data = datas[node];
-const params = switch (node_tags[node]) {
-.builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
-.builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-&[_]Ast.Node.Index{}
-else if (data.rhs == 0)
-&[_]Ast.Node.Index{data.lhs}
-else
-&[_]Ast.Node.Index{ data.lhs, data.rhs },
-else => unreachable,
-};
+var buffer: [2]Ast.Node.Index = undefined;
+const params = ast.builtinCallParams(tree, node, &buffer).?;
 
 const call_name = tree.tokenSlice(main_tokens[node]);
 if (std.mem.eql(u8, call_name, "@This")) {
@@ -953,9 +905,8 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 return resolved_type;
 }
 
-if (!std.mem.eql(u8, call_name, "@import")) return null;
+if (std.mem.eql(u8, call_name, "@import")) {
 if (params.len == 0) return null;
 
 const import_param = params[0];
 if (node_tags[import_param] != .string_literal) return null;
 
@@ -967,6 +918,15 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 
 // reference to node '0' which is root
 return TypeWithHandle.typeVal(.{ .node = 0, .handle = new_handle });
+} else if (std.mem.eql(u8, call_name, "@cImport")) {
+const new_handle = (store.resolveCImport(handle, node) catch |err| {
+log.debug("Error {} while processing cImport", .{err}); // TODO improve
+return null;
+}) orelse return null;
+
+// reference to node '0' which is root
+return TypeWithHandle.typeVal(.{ .node = 0, .handle = new_handle });
+}
 },
 .fn_proto,
 .fn_proto_multi,
@@ -1126,8 +1086,17 @@ pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator,
 return resolveTypeOfNodeInternal(store, arena, node_handle, &bound_type_params);
 }
 
-/// Collects all imports we can find into a slice of import paths (without quotes).
-pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: Ast) !void {
+/// Collects all `@import`'s we can find into a slice of import paths (without quotes).
+/// Caller owns returned memory.
+pub fn collectImports(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}![][]const u8 {
+var imports = std.ArrayListUnmanaged([]const u8){};
+errdefer {
+for (imports.items) |imp| {
+allocator.free(imp);
+}
+imports.deinit(allocator);
+}
+
 const tags = tree.tokens.items(.tag);
 
 var i: usize = 0;
@@ -1147,9 +1116,33 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: Ast) !void {
 continue;
 
 const str = tree.tokenSlice(@intCast(u32, i + 2));
-try import_arr.append(str[1 .. str.len - 1]);
+try imports.append(allocator, str[1 .. str.len - 1]);
 }
 }
+
+return imports.toOwnedSlice(allocator);
+}
+
+/// Collects all `@cImport` nodes
+/// Caller owns returned memory.
+pub fn collectCImportNodes(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}![]Ast.Node.Index {
+var import_nodes = std.ArrayListUnmanaged(Ast.Node.Index){};
+errdefer import_nodes.deinit(allocator);
+
+const node_tags = tree.nodes.items(.tag);
+const main_tokens = tree.nodes.items(.main_token);
+
+var i: usize = 0;
+while (i < node_tags.len) : (i += 1) {
+const node = @intCast(Ast.Node.Index, i);
+if (!ast.isBuiltinCall(tree, node)) continue;
+
+if (!std.mem.eql(u8, Ast.tokenSlice(tree, main_tokens[node]), "@cImport")) continue;
+
+try import_nodes.append(allocator, node);
+}
+
+return import_nodes.toOwnedSlice(allocator);
 }
 
 pub const NodeWithHandle = struct {
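A caller sketch for the reworked collection APIs (assuming an allocator and a parsed tree are in scope; the `analysis` qualifier is how other files in this repo import this module); both functions now hand ownership of the returned slice to the caller:

```zig
// Hypothetical call site. The import paths are slices into the tree's source,
// so this sketch frees only the outer slices it owns.
const imports = try analysis.collectImports(allocator, tree);
defer allocator.free(imports);

const cimport_nodes = try analysis.collectCImportNodes(allocator, tree);
defer allocator.free(cimport_nodes);

for (imports) |path| std.log.debug("found @import(\"{s}\")", .{path});
```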
@@ -1390,34 +1383,22 @@ pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]con
 return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index);
 }
 
-if (!nodeContainsSourceIndex(tree, node, source_index)) {
-return null;
-}
+if (!nodeContainsSourceIndex(tree, node, source_index)) return null;
+if (!ast.isBuiltinCall(tree, node)) return null;
 
-if (ast.isBuiltinCall(tree, node)) {
 const builtin_token = tree.nodes.items(.main_token)[node];
 const call_name = tree.tokenSlice(builtin_token);
 
 if (!std.mem.eql(u8, call_name, "@import")) return null;
-const data = tree.nodes.items(.data)[node];
-const params = switch (node_tags[node]) {
-.builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
-.builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-&[_]Ast.Node.Index{}
-else if (data.rhs == 0)
-&[_]Ast.Node.Index{data.lhs}
-else
-&[_]Ast.Node.Index{ data.lhs, data.rhs },
-else => unreachable,
-};
+var buffer: [2]Ast.Node.Index = undefined;
+const params = ast.builtinCallParams(tree, node, &buffer).?;
 
 if (params.len != 1) return null;
 
 const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]);
 return import_str[1 .. import_str.len - 1];
-}
-
-return null;
 }
 
 pub const SourceRange = std.zig.Token.Loc;
@@ -2522,21 +2503,8 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx:
 }
 }
 
-const container_decl = switch (node_tag) {
-.container_decl, .container_decl_trailing => tree.containerDecl(node_idx),
-.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx),
-.container_decl_two, .container_decl_two_trailing => blk: {
 var buffer: [2]Ast.Node.Index = undefined;
-break :blk tree.containerDeclTwo(&buffer, node_idx);
-},
-.tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx),
-.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx),
-.tagged_union_two, .tagged_union_two_trailing => blk: {
-var buffer: [2]Ast.Node.Index = undefined;
-break :blk tree.taggedUnionTwo(&buffer, node_idx);
-},
-else => null,
-};
+const container_decl = ast.containerDecl(tree, node_idx, &buffer);
 
 // Only tagged unions and enums should pass this
 const can_have_enum_completions = if (container_decl) |container| blk: {
@@ -2717,20 +2685,8 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
 uses.deinit();
 }
 
-const statements: []const Ast.Node.Index = switch (node_tag) {
-.block, .block_semicolon => tree.extra_data[data[node_idx].lhs..data[node_idx].rhs],
-.block_two, .block_two_semicolon => blk: {
-const statements = &[_]Ast.Node.Index{ data[node_idx].lhs, data[node_idx].rhs };
-const len: usize = if (data[node_idx].lhs == 0)
-@as(usize, 0)
-else if (data[node_idx].rhs == 0)
-@as(usize, 1)
-else
-@as(usize, 2);
-break :blk statements[0..len];
-},
-else => unreachable,
-};
+var buffer: [2]Ast.Node.Index = undefined;
+const statements = ast.blockStatements(tree, node_idx, &buffer).?;
 
 for (statements) |idx| {
 if (tags[idx] == .@"usingnamespace") {
@@ -3058,17 +3014,8 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
 .builtin_call_two,
 .builtin_call_two_comma,
 => {
-const b_data = data[node_idx];
-const params = switch (node_tag) {
-.builtin_call, .builtin_call_comma => tree.extra_data[b_data.lhs..b_data.rhs],
-.builtin_call_two, .builtin_call_two_comma => if (b_data.lhs == 0)
-&[_]Ast.Node.Index{}
-else if (b_data.rhs == 0)
-&[_]Ast.Node.Index{b_data.lhs}
-else
-&[_]Ast.Node.Index{ b_data.lhs, b_data.rhs },
-else => unreachable,
-};
+var buffer: [2]Ast.Node.Index = undefined;
+const params = ast.builtinCallParams(tree, node_idx, &buffer).?;
 
 for (params) |param| {
 try makeScopeInternal(allocator, context, param);
src/ast.zig (90 changes)
@@ -948,6 +948,18 @@ pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
 };
 }
 
+pub fn containerDecl(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) ?full.ContainerDecl {
+return switch (tree.nodes.items(.tag)[node_idx]) {
+.container_decl, .container_decl_trailing => tree.containerDecl(node_idx),
+.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx),
+.container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx),
+.tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx),
+.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx),
+.tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx),
+else => null,
+};
+}
+
 /// Returns the member indices of a given declaration container.
 /// Asserts given `tag` is a container node
 pub fn declMembers(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) []const Ast.Node.Index {
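A sketch of the call-site pattern the new helper enables (compare the makeInnerScope hunk earlier in this commit); the caller-supplied scratch buffer backs the `*_two` variants, mirroring declMembers:

```zig
// tree and node_idx assumed in scope; returns null for non-container nodes.
var buffer: [2]Ast.Node.Index = undefined;
if (ast.containerDecl(tree, node_idx, &buffer)) |container_decl| {
    for (container_decl.ast.members) |member| {
        // visit each field or declaration of the struct/enum/union
        _ = member;
    }
}
```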
@@ -977,6 +989,17 @@ pub fn varDecl(tree: Ast, node_idx: Ast.Node.Index) ?Ast.full.VarDecl {
 };
 }
 
+pub fn isPtrType(tree: Ast, node: Ast.Node.Index) bool {
+return switch (tree.nodes.items(.tag)[node]) {
+.ptr_type,
+.ptr_type_aligned,
+.ptr_type_bit_range,
+.ptr_type_sentinel,
+=> true,
+else => false,
+};
+}
+
 pub fn isBuiltinCall(tree: Ast, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .builtin_call,
@@ -1003,6 +1026,29 @@ pub fn isCall(tree: Ast, node: Ast.Node.Index) bool {
 };
 }
 
+pub fn isBlock(tree: Ast, node: Ast.Node.Index) bool {
+return switch (tree.nodes.items(.tag)[node]) {
+.block_two,
+.block_two_semicolon,
+.block,
+.block_semicolon,
+=> true,
+else => false,
+};
+}
+
+pub fn fnProtoHasBody(tree: Ast, node: Ast.Node.Index) ?bool {
+return switch (tree.nodes.items(.tag)[node]) {
+.fn_proto,
+.fn_proto_multi,
+.fn_proto_one,
+.fn_proto_simple,
+=> false,
+.fn_decl => true,
+else => null,
+};
+}
+
 pub fn fnProto(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto {
 return switch (tree.nodes.items(.tag)[node]) {
 .fn_proto => tree.fnProto(node),
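A small usage sketch for the new fnProtoHasBody helper (tree and node assumed in scope); the optional return distinguishes non-functions from bodiless prototypes:

```zig
// null: not a function node; false: a bare prototype (e.g. an extern fn);
// true: .fn_decl, a prototype with an attached body.
if (ast.fnProtoHasBody(tree, node)) |has_body| {
    if (has_body) {
        // safe to walk the body, e.g. for return statements
    }
}
```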
@@ -1029,3 +1075,47 @@ pub fn callFull(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.f
 else => null,
 };
 }
+
+/// returns a list of parameters
+pub fn builtinCallParams(tree: Ast, node: Ast.Node.Index, buf: *[2]Ast.Node.Index) ?[]const Node.Index {
+const node_data = tree.nodes.items(.data);
+return switch (tree.nodes.items(.tag)[node]) {
+.builtin_call_two, .builtin_call_two_comma => {
+buf[0] = node_data[node].lhs;
+buf[1] = node_data[node].rhs;
+if (node_data[node].lhs == 0) {
+return buf[0..0];
+} else if (node_data[node].rhs == 0) {
+return buf[0..1];
+} else {
+return buf[0..2];
+}
+},
+.builtin_call,
+.builtin_call_comma,
+=> tree.extra_data[node_data[node].lhs..node_data[node].rhs],
+else => return null,
+};
+}
+
+/// returns a list of statements
+pub fn blockStatements(tree: Ast, node: Ast.Node.Index, buf: *[2]Ast.Node.Index) ?[]const Node.Index {
+const node_data = tree.nodes.items(.data);
+return switch (tree.nodes.items(.tag)[node]) {
+.block_two, .block_two_semicolon => {
+buf[0] = node_data[node].lhs;
+buf[1] = node_data[node].rhs;
+if (node_data[node].lhs == 0) {
+return buf[0..0];
+} else if (node_data[node].rhs == 0) {
+return buf[0..1];
+} else {
+return buf[0..2];
+}
+},
+.block,
+.block_semicolon,
+=> tree.extra_data[node_data[node].lhs..node_data[node].rhs],
+else => return null,
+};
+}
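builtinCallParams and blockStatements share one calling convention: the caller supplies a two-element scratch buffer, which backs the returned slice for the `*_two` node variants, while the multi-element variants return a slice of tree.extra_data directly. The returned slice is therefore only valid while the buffer lives. A minimal usage sketch:

```zig
// tree and node assumed in scope; keep `buffer` alive as long as `params` is used.
var buffer: [2]Ast.Node.Index = undefined;
if (ast.builtinCallParams(tree, node, &buffer)) |params| {
    for (params) |param| {
        // visit each argument of the builtin call
        _ = param;
    }
}
```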
@@ -133,7 +133,7 @@ fn writeCallHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *Doc
 }
 
 /// takes parameter nodes from the ast and function parameter names from `Builtin.arguments` and writes parameter hints into `builder.hints`
-fn writeBuiltinHint(builder: *Builder, parameters: []Ast.Node.Index, arguments: []const []const u8) !void {
+fn writeBuiltinHint(builder: *Builder, parameters: []const Ast.Node.Index, arguments: []const []const u8) !void {
 if (parameters.len == 0) return;
 
 const handle = builder.handle;
@@ -290,25 +290,9 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
 .builtin_call_comma,
 => {
 var buffer: [2]Ast.Node.Index = undefined;
-const parameters: []Ast.Node.Index = switch (tag) {
-.builtin_call_two, .builtin_call_two_comma => blk: {
-buffer[0] = node_data[node].lhs;
-buffer[1] = node_data[node].rhs;
-
-var size: usize = 0;
-if (node_data[node].rhs != 0) {
-size = 2;
-} else if (node_data[node].lhs != 0) {
-size = 1;
-}
-break :blk buffer[0..size];
-},
-.builtin_call, .builtin_call_comma => tree.extra_data[node_data[node].lhs..node_data[node].rhs],
-else => unreachable,
-};
-
-if (builder.config.inlay_hints_show_builtin and parameters.len > 1) {
+const params = ast.builtinCallParams(tree, node, &buffer).?;
+if (builder.config.inlay_hints_show_builtin and params.len > 1) {
 const name = tree.tokenSlice(main_tokens[node]);
 
 outer: for (data.builtins) |builtin| {
@@ -318,12 +302,12 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
 if (std.mem.eql(u8, builtin_name, name)) break :outer;
 }
 
-try writeBuiltinHint(builder, parameters, builtin.arguments);
+try writeBuiltinHint(builder, params, builtin.arguments);
 }
 }
 
-for (parameters) |param| {
-if (parameters.len > inlay_hints_max_inline_children) {
+for (params) |param| {
+if (params.len > inlay_hints_max_inline_children) {
 if (!isNodeInRange(tree, param, range)) continue;
 }
 
@@ -623,15 +607,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
 .tagged_union_enum_tag_trailing,
 => {
 var buffer: [2]Ast.Node.Index = undefined;
-const decl: Ast.full.ContainerDecl = switch (tag) {
-.container_decl, .container_decl_trailing => tree.containerDecl(node),
-.container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buffer, node),
-.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node),
-.tagged_union, .tagged_union_trailing => tree.taggedUnion(node),
-.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node),
-.tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buffer, node),
-else => unreachable,
-};
+const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buffer).?;
 
 try await @asyncCall(child_frame, {}, writeNodeInlayHint, .{ builder, arena, store, decl.ast.arg, range });
 
@@ -1 +1 @@
-Subproject commit 9db1b99219c767d5e24994b1525273fe4031e464
+Subproject commit 24845b0103e611c108d6bc334231c464e699742c

src/main.zig (168 changes)
@@ -32,8 +32,7 @@ fn loop(server: *Server) !void {
 
 try reader.readNoEof(buffer);
 
-var writer = std.io.getStdOut().writer();
+const writer = std.io.getStdOut().writer();
 
 try server.processJsonRpc(writer, buffer);
 }
 }
@@ -43,8 +42,15 @@ const ConfigWithPath = struct {
 config_path: ?[]const u8,
 };
 
-fn getConfig(allocator: std.mem.Allocator, config: ConfigWithPath) !ConfigWithPath {
-if (config.config_path) |path| {
+fn getConfig(
+allocator: std.mem.Allocator,
+config_path: ?[]const u8,
+/// If true, and the provided config_path is non-null, frees
+/// the aforementioned path, in the case that it is
+/// not returned.
+free_old_config_path: bool,
+) !ConfigWithPath {
+if (config_path) |path| {
 if (Config.loadFromFile(allocator, path)) |conf| {
 return ConfigWithPath{
 .config = conf,
@@ -56,6 +62,9 @@ fn getConfig(allocator: std.mem.Allocator, config: ConfigWithPath) !ConfigWithPa
 \\Falling back to a lookup in the local and global configuration folders
 \\
 , .{path});
+if (free_old_config_path) {
+allocator.free(path);
+}
 }
 
 if (try known_folders.getPath(allocator, .local_configuration)) |path| {
@@ -65,6 +74,7 @@ fn getConfig(allocator: std.mem.Allocator, config: ConfigWithPath) !ConfigWithPa
 .config_path = path,
 };
 }
+allocator.free(path);
 }
 
 if (try known_folders.getPath(allocator, .global_configuration)) |path| {
@@ -74,6 +84,7 @@ fn getConfig(allocator: std.mem.Allocator, config: ConfigWithPath) !ConfigWithPa
 .config_path = path,
 };
 }
+allocator.free(path);
 }
 
 return ConfigWithPath{
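The new free_old_config_path parameter matters at the call site in main() further down in this diff: ownership of the CLI-provided path passes to getConfig, which frees it whenever that path is not the one returned:

```zig
// From the main() hunk below; `true` hands ownership of config.config_path
// to getConfig.
config = try getConfig(allocator, config.config_path, true);
```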
@@ -82,19 +93,130 @@ fn getConfig(allocator: std.mem.Allocator, config: ConfigWithPath) !ConfigWithPa
 };
 }
 
+const ParseArgsResult = enum { proceed, exit };
+fn parseArgs(
+allocator: std.mem.Allocator,
+config: *ConfigWithPath,
+) !ParseArgsResult {
+const ArgId = enum {
+help,
+version,
+config,
+@"enable-debug-log",
+@"config-path",
+};
+const arg_id_map = std.ComptimeStringMap(ArgId, comptime blk: {
+const fields = @typeInfo(ArgId).Enum.fields;
+const KV = std.meta.Tuple(&.{ []const u8, ArgId });
+var pairs: [fields.len]KV = undefined;
+for (pairs) |*pair, i| pair.* = .{ fields[i].name, @intToEnum(ArgId, fields[i].value) };
+break :blk pairs[0..];
+});
+const help_message: []const u8 = comptime help_message: {
+var help_message: []const u8 =
+\\Usage: zls [command]
+\\
+\\Commands:
+\\
+\\
+;
+const InfoMap = std.enums.EnumArray(ArgId, []const u8);
+var cmd_infos: InfoMap = InfoMap.init(.{
+.help = "Prints this message.",
+.version = "Prints the compiler version with which the server was compiled.",
+.@"enable-debug-log" = "Enables debug logs.",
+.@"config-path" = "Specify the path to a configuration file specifying LSP behaviour.",
+.config = "Run the ZLS configuration wizard.",
+});
+var info_it = cmd_infos.iterator();
+while (info_it.next()) |entry| {
+help_message = help_message ++ std.fmt.comptimePrint(" --{s}: {s}\n", .{ @tagName(entry.key), entry.value.* });
+}
+help_message = help_message ++ "\n";
+break :help_message help_message;
+};
+
+var args_it = try std.process.ArgIterator.initWithAllocator(allocator);
+defer args_it.deinit();
+if (!args_it.skip()) @panic("Could not find self argument");
+
+// Makes behavior of enabling debug more logging consistent regardless of argument order.
+var specified = std.enums.EnumArray(ArgId, bool).initFill(false);
+var config_path: ?[]const u8 = null;
+errdefer if (config_path) |path| allocator.free(path);
+
+const stderr = std.io.getStdErr().writer();
+
+while (args_it.next()) |tok| {
+if (!std.mem.startsWith(u8, tok, "--") or tok.len == 2) {
+try stderr.print("{s}\n", .{help_message});
+try stderr.print("Unexpected positional argument '{s}'.\n", .{tok});
+return .exit;
+}
+
+const argname = tok["--".len..];
+const id = arg_id_map.get(argname) orelse {
+try stderr.print("{s}\n", .{help_message});
+try stderr.print("Unrecognized argument '{s}'.\n", .{argname});
+return .exit;
+};
+
+if (specified.get(id)) {
+try stderr.print("{s}\n", .{help_message});
+try stderr.print("Duplicate argument '{s}'.\n", .{argname});
+return .exit;
+}
+specified.set(id, true);
+
+switch (id) {
+.help => {},
+.version => {},
+.@"enable-debug-log" => {},
+.config => {},
+.@"config-path" => {
+const path = args_it.next() orelse {
+try stderr.print("Expected configuration file path after --config-path argument.\n", .{});
+return .exit;
+};
+config.config_path = try allocator.dupe(u8, path);
+},
+}
+}
+
+if (specified.get(.help)) {
+try stderr.print("{s}\n", .{help_message});
+return .exit;
+}
+if (specified.get(.version)) {
+try std.io.getStdOut().writer().print("Data Version: {s}\n", .{@tagName(build_options.data_version)});
+return .exit;
+}
+if (specified.get(.config)) {
+try setup.wizard(allocator);
+return .exit;
+}
+if (specified.get(.@"enable-debug-log")) {
+actual_log_level = .debug;
+logger.info("Enabled debug logging.\n", .{});
+}
+if (specified.get(.@"config-path")) {
+std.debug.assert(config.config_path != null);
+}
+
+return .proceed;
+}
+
 const stack_frames = switch (zig_builtin.mode) {
 .Debug => 10,
 else => 0,
 };
 
-pub fn main() anyerror!void {
+pub fn main() !void {
 var gpa_state = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = stack_frames }){};
 defer _ = gpa_state.deinit();
-var allocator = gpa_state.allocator();
-if (tracy.enable_allocation) {
-allocator = tracy.tracyAllocator(allocator).allocator();
-}
+var tracy_state = if (tracy.enable_allocation) tracy.tracyAllocator(gpa_state.allocator()) else void{};
+
+const allocator: std.mem.Allocator = if (tracy.enable_allocation) tracy_state.allocator() else gpa_state.allocator();
 
 var config = ConfigWithPath{
 .config = undefined,
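The flag table in parseArgs above is generated at comptime from the ArgId enum. A standalone sketch of the same std.ComptimeStringMap pattern (written for illustration, not code from this diff):

```zig
const std = @import("std");

const ArgId = enum { help, version, config, @"enable-debug-log", @"config-path" };

test "flag lookup via ComptimeStringMap" {
    const map = std.ComptimeStringMap(ArgId, .{
        .{ "help", .help },
        .{ "version", .version },
        .{ "config", .config },
        .{ "enable-debug-log", .@"enable-debug-log" },
        .{ "config-path", .@"config-path" },
    });
    try std.testing.expectEqual(ArgId.help, map.get("help").?);
    try std.testing.expect(map.get("bogus") == null); // unknown flags are rejected
}
```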
@@ -102,32 +224,12 @@ pub fn main() anyerror!void {
 };
 defer if (config.config_path) |path| allocator.free(path);
 
-// Check arguments.
-var args_it = try std.process.ArgIterator.initWithAllocator(allocator);
-defer args_it.deinit();
-if (!args_it.skip()) @panic("Could not find self argument");
-
-while (args_it.next()) |arg| {
-// TODO add --help --version
-if (std.mem.eql(u8, arg, "--debug-log")) {
-actual_log_level = .debug;
-std.debug.print("Enabled debug logging\n", .{});
-} else if (std.mem.eql(u8, arg, "--config-path")) {
-var path = args_it.next() orelse {
-std.debug.print("Expected configuration file path after --config-path argument\n", .{});
-std.os.exit(1);
-};
-config.config_path = try allocator.dupe(u8, path);
-} else if (std.mem.eql(u8, arg, "config") or std.mem.eql(u8, arg, "configure")) {
-try setup.wizard(allocator);
-return;
-} else {
-std.debug.print("Unrecognized argument {s}\n", .{arg});
-std.os.exit(1);
-}
+switch (try parseArgs(allocator, &config)) {
+.proceed => {},
+.exit => return,
 }
 
-config = try getConfig(allocator, config);
+config = try getConfig(allocator, config.config_path, true);
 if (config.config_path == null) {
 logger.info("No config file zls.json found.", .{});
 }
@@ -67,20 +67,9 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 
 switch (node_tags[node]) {
 .block, .block_semicolon, .block_two, .block_two_semicolon => {
-const statements: []const Ast.Node.Index = switch (node_tags[node]) {
-.block, .block_semicolon => tree.extra_data[datas[node].lhs..datas[node].rhs],
-.block_two, .block_two_semicolon => blk: {
-const statements = &[_]Ast.Node.Index{ datas[node].lhs, datas[node].rhs };
-const len: usize = if (datas[node].lhs == 0)
-@as(usize, 0)
-else if (datas[node].rhs == 0)
-@as(usize, 1)
-else
-@as(usize, 2);
-break :blk statements[0..len];
-},
-else => unreachable,
-};
+var buffer: [2]Ast.Node.Index = undefined;
+const statements = ast.blockStatements(tree, node, &buffer).?;
 for (statements) |stmt|
 try symbolReferencesInternal(arena, store, .{ .node = stmt, .handle = handle }, decl, encoding, context, handler);
 },
@@ -319,13 +308,10 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .async_call_comma,
 .async_call_one,
 .async_call_one_comma,
-=> |c| {
+=> {
 var buf: [1]Ast.Node.Index = undefined;
-const call: Ast.full.Call = switch (c) {
-.call, .call_comma, .async_call, .async_call_comma => tree.callFull(node),
-.call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(&buf, node),
-else => unreachable,
-};
+const call = ast.callFull(tree, node, &buf).?;
 if (call.ast.fn_expr != 0)
 try symbolReferencesInternal(arena, store, .{ .node = call.ast.fn_expr, .handle = handle }, decl, encoding, context, handler);
 
@@ -380,18 +366,9 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .builtin_call_comma,
 .builtin_call_two,
 .builtin_call_two_comma,
-=> |builtin_tag| {
-const data = datas[node];
-const params = switch (builtin_tag) {
-.builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
-.builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-&[_]Ast.Node.Index{}
-else if (data.rhs == 0)
-&[_]Ast.Node.Index{data.lhs}
-else
-&[_]Ast.Node.Index{ data.lhs, data.rhs },
-else => unreachable,
-};
+=> {
+var buffer: [2]Ast.Node.Index = undefined;
+const params = ast.builtinCallParams(tree, node, &buffer).?;
 
 for (params) |param|
 try symbolReferencesInternal(arena, store, .{ .node = param, .handle = handle }, decl, encoding, context, handler);
@@ -282,7 +282,7 @@ pub const Configuration = struct {
 zig_exe_path: ?[]const u8,
 warn_style: ?bool,
 build_runner_path: ?[]const u8,
-build_runner_cache_path: ?[]const u8,
+global_cache_path: ?[]const u8,
 enable_semantic_tokens: ?bool,
 enable_inlay_hints: ?bool,
 inlay_hints_show_builtin: ?bool,
@@ -317,20 +317,8 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 try writeToken(builder, main_token - 2, .label);
 }
 
-const statements: []const Ast.Node.Index = switch (tag) {
-.block, .block_semicolon => tree.extra_data[node_data[node].lhs..node_data[node].rhs],
-.block_two, .block_two_semicolon => blk: {
-const statements = &[_]Ast.Node.Index{ node_data[node].lhs, node_data[node].rhs };
-const len: usize = if (node_data[node].lhs == 0)
-@as(usize, 0)
-else if (node_data[node].rhs == 0)
-@as(usize, 1)
-else
-@as(usize, 2);
-break :blk statements[0..len];
-},
-else => unreachable,
-};
+var buffer: [2]Ast.Node.Index = undefined;
+const statements = ast.blockStatements(tree, node, &buffer).?;
 
 for (statements) |child| {
 if (node_tags[child].isContainerField()) {
@@ -390,15 +378,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .tagged_union_two_trailing,
 => {
 var buf: [2]Ast.Node.Index = undefined;
-const decl: Ast.full.ContainerDecl = switch (tag) {
-.container_decl, .container_decl_trailing => tree.containerDecl(node),
-.container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node),
-.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node),
-.tagged_union, .tagged_union_trailing => tree.taggedUnion(node),
-.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node),
-.tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buf, node),
-else => unreachable,
-};
+const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buf).?;
 
 try writeToken(builder, decl.layout_token, .keyword);
 try writeToken(builder, decl.ast.main_token, .keyword);
@@ -427,7 +407,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .identifier => {
 const name = tree.getNodeSource(node);
 
-if(std.mem.eql(u8,name, "undefined")) {
+if (std.mem.eql(u8, name, "undefined")) {
 return try writeToken(builder, main_token, .keywordLiteral);
 } else if (analysis.isTypeIdent(name)) {
 return try writeToken(builder, main_token, .type);
@@ -682,11 +662,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .async_call_one_comma,
 => {
 var params: [1]Ast.Node.Index = undefined;
-const call: Ast.full.Call = switch (tag) {
-.call, .call_comma, .async_call, .async_call_comma => tree.callFull(node),
-.call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(&params, node),
-else => unreachable,
-};
+const call = ast.callFull(tree, node, &params).?;
 
 try writeToken(builder, call.async_token, .keyword);
 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr });
@@ -757,17 +733,8 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .builtin_call_two,
 .builtin_call_two_comma,
 => {
-const data = node_data[node];
-const params = switch (tag) {
-.builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
-.builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-&[_]Ast.Node.Index{}
-else if (data.rhs == 0)
-&[_]Ast.Node.Index{data.lhs}
-else
-&[_]Ast.Node.Index{ data.lhs, data.rhs },
-else => unreachable,
-};
+var buffer: [2]Ast.Node.Index = undefined;
+const params = ast.builtinCallParams(tree, node, &buffer).?;
 
 try writeToken(builder, main_token, .builtin);
 for (params) |param|
@@ -1,18 +1,28 @@
 const root = @import("@build@");
 const std = @import("std");
-const fmt = std.fmt;
-const io = std.io;
 const log = std.log;
 const process = std.process;
 const Builder = std.build.Builder;
-const Pkg = std.build.Pkg;
 const InstallArtifactStep = std.build.InstallArtifactStep;
 const LibExeObjStep = std.build.LibExeObjStep;
-const ArrayList = std.ArrayList;
 
+pub const BuildConfig = struct {
+packages: []Pkg,
+include_dirs: []IncludeDir,
+
+pub const Pkg = struct {
+name: []const u8,
+path: []const u8,
+};
+
+pub const IncludeDir = struct {
+path: []const u8,
+system: bool,
+};
+};
+
 ///! This is a modified build runner to extract information out of build.zig
-///! Modified from the std.special.build_runner
+///! Modified version of lib/build_runner.zig
 pub fn main() !void {
 var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
 defer arena.deinit();
@@ -55,48 +65,97 @@ pub fn main() !void {
 builder.resolveInstallPrefix(null, Builder.DirList{});
 try runBuild(builder);
 
-const stdout_stream = io.getStdOut().writer();
+var packages = std.ArrayList(BuildConfig.Pkg).init(allocator);
+defer packages.deinit();
+
+var include_dirs = std.ArrayList(BuildConfig.IncludeDir).init(allocator);
+defer include_dirs.deinit();
+
 // TODO: We currently add packages from every LibExeObj step that the install step depends on.
 // Should we error out or keep one step or something similar?
 // We also flatten them, we should probably keep the nested structure.
 for (builder.top_level_steps.items) |tls| {
 for (tls.step.dependencies.items) |step| {
-try processStep(stdout_stream, step);
+try processStep(&packages, &include_dirs, step);
 }
 }
+
+try std.json.stringify(
+BuildConfig{
+.packages = packages.items,
+.include_dirs = include_dirs.items,
+},
+.{ .whitespace = .{} },
+std.io.getStdOut().writer(),
+);
 }
 
-fn processStep(stdout_stream: anytype, step: *std.build.Step) anyerror!void {
+fn processStep(
+packages: *std.ArrayList(BuildConfig.Pkg),
+include_dirs: *std.ArrayList(BuildConfig.IncludeDir),
+step: *std.build.Step,
+) anyerror!void {
 if (step.cast(InstallArtifactStep)) |install_exe| {
+try processIncludeDirs(include_dirs, install_exe.artifact.include_dirs.items);
 for (install_exe.artifact.packages.items) |pkg| {
-try processPackage(stdout_stream, pkg);
+try processPackage(packages, pkg);
 }
 } else if (step.cast(LibExeObjStep)) |exe| {
+try processIncludeDirs(include_dirs, exe.include_dirs.items);
 for (exe.packages.items) |pkg| {
-try processPackage(stdout_stream, pkg);
+try processPackage(packages, pkg);
 }
 } else {
 for (step.dependencies.items) |unknown_step| {
-try processStep(stdout_stream, unknown_step);
+try processStep(packages, include_dirs, unknown_step);
 }
 }
 }
 
-fn processPackage(out_stream: anytype, pkg: Pkg) anyerror!void {
-const source = if (@hasField(Pkg, "source")) pkg.source else pkg.path;
-switch (source) {
-.path => |path| try out_stream.print("{s}\x00{s}\n", .{ pkg.name, path }),
-.generated => |generated| if (generated.path != null) try out_stream.print("{s}\x00{s}\n", .{ pkg.name, generated.path.? }),
+fn processPackage(
+packages: *std.ArrayList(BuildConfig.Pkg),
+pkg: std.build.Pkg,
+) anyerror!void {
+for (packages.items) |package| {
+if (std.mem.eql(u8, package.name, pkg.name)) return;
+}
+
+const source = if (@hasField(std.build.Pkg, "source")) pkg.source else pkg.path;
+const maybe_path = switch (source) {
+.path => |path| path,
+.generated => |generated| generated.path,
+};
+
+if (maybe_path) |path| {
+try packages.append(.{ .name = pkg.name, .path = path });
 }
 
 if (pkg.dependencies) |dependencies| {
 for (dependencies) |dep| {
-try processPackage(out_stream, dep);
+try processPackage(packages, dep);
 }
 }
 }
+
+fn processIncludeDirs(
+include_dirs: *std.ArrayList(BuildConfig.IncludeDir),
+dirs: []std.build.LibExeObjStep.IncludeDir,
+) !void {
+outer: for (dirs) |dir| {
+const candidate: BuildConfig.IncludeDir = switch (dir) {
+.raw_path => |path| .{ .path = path, .system = false },
+.raw_path_system => |path| .{ .path = path, .system = true },
+else => continue,
+};
+
+for (include_dirs.items) |include_dir| {
+if (std.mem.eql(u8, candidate.path, include_dir.path)) continue :outer;
+}
+
+try include_dirs.append(candidate);
+}
+}
 
 fn runBuild(builder: *Builder) anyerror!void {
 switch (@typeInfo(@typeInfo(@TypeOf(root.build)).Fn.return_type.?)) {
 .Void => root.build(builder),
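The reworked build runner above replaces the old NUL-separated stdout protocol with a single JSON document describing the build. A standalone sketch of the same std.json.stringify call, with hypothetical sample data (the package name and paths are illustrative only):

```zig
const std = @import("std");

// Stand-in mirroring the runner's BuildConfig declared above.
const BuildConfig = struct {
    packages: []const Pkg,
    include_dirs: []const IncludeDir,

    pub const Pkg = struct { name: []const u8, path: []const u8 };
    pub const IncludeDir = struct { path: []const u8, system: bool };
};

pub fn main() !void {
    // Hypothetical data; the real runner collects these from build.zig steps.
    try std.json.stringify(
        BuildConfig{
            .packages = &[_]BuildConfig.Pkg{
                .{ .name = "known-folders", .path = "src/known-folders/known-folders.zig" },
            },
            .include_dirs = &[_]BuildConfig.IncludeDir{
                .{ .path = "/usr/include", .system = true },
            },
        },
        .{ .whitespace = .{} }, // same pretty-printing option the runner uses
        std.io.getStdOut().writer(),
    );
}
```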
|
@ -1 +1 @@
|
|||||||
Subproject commit 2d8723b69b39721eadcc296451012828899c0f17
|
Subproject commit f493d4aa8ba8141d9680473fad007d8a6348628e
|
183
src/translate_c.zig
Normal file
183
src/translate_c.zig
Normal file
@ -0,0 +1,183 @@
|
|||||||
|
const std = @import("std");
|
||||||
|
const builtin = @import("builtin");
|
||||||
|
const Config = @import("Config.zig");
|
||||||
|
const ast = @import("ast.zig");
|
||||||
|
const Ast = std.zig.Ast;
|
||||||
|
const URI = @import("uri.zig");
|
||||||
|
|
||||||
|
/// converts a `@cInclude` node into an equivalent c header file
|
||||||
|
/// which can then be handed over to `zig translate-c`
|
||||||
|
/// Caller owns returned memory.
|
||||||
|
///
|
||||||
|
/// **Example**
|
||||||
|
/// ```zig
|
||||||
|
/// const glfw = @cImport(
|
||||||
|
/// @cDefine("GLFW_INCLUDE_VULKAN", {})
|
||||||
|
/// @cInclude("GLFW/glfw3.h")
|
||||||
|
/// );
|
||||||
|
/// ```
|
||||||
|
/// gets converted into:
|
||||||
|
/// ```c
|
||||||
|
/// #define GLFW_INCLUDE_VULKAN
|
||||||
|
/// #include "GLFW/glfw3.h"
|
||||||
|
/// ```
|
||||||
|
pub fn convertCInclude(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index) error{ OutOfMemory, Unsupported }![]const u8 {
|
||||||
|
const main_tokens = tree.nodes.items(.main_token);
|
||||||
|
|
||||||
|
std.debug.assert(ast.isBuiltinCall(tree, node));
|
||||||
|
std.debug.assert(std.mem.eql(u8, Ast.tokenSlice(tree, main_tokens[node]), "@cImport"));
|
||||||
|
|
||||||
|
var output = std.ArrayList(u8).init(allocator);
|
||||||
|
errdefer output.deinit();
|
||||||
|
|
||||||
|
var stack_allocator = std.heap.stackFallback(512, allocator);
|
||||||
|
|
||||||
|
var buffer: [2]Ast.Node.Index = undefined;
|
||||||
|
for (ast.builtinCallParams(tree, node, &buffer).?) |child| {
|
||||||
|
try convertCIncludeInternal(stack_allocator.get(), tree, child, &output);
|
||||||
|
}
|
||||||
|
|
||||||
|
return output.toOwnedSlice();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn convertCIncludeInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index, output: *std.ArrayList(u8)) error{ OutOfMemory, Unsupported }!void {
|
||||||
|
const node_tags = tree.nodes.items(.tag);
|
||||||
|
const main_tokens = tree.nodes.items(.main_token);
|
||||||
|
|
||||||
|
var buffer: [2]Ast.Node.Index = undefined;
|
||||||
|
if (ast.isBlock(tree, node)) {
|
||||||
|
const FrameSize = @sizeOf(@Frame(convertCIncludeInternal));
|
||||||
|
var child_frame = try allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize);
|
||||||
|
defer allocator.free(child_frame);
|
||||||
|
|
||||||
|
for (ast.blockStatements(tree, node, &buffer).?) |statement| {
|
||||||
|
try await @asyncCall(child_frame, {}, convertCIncludeInternal, .{ allocator, tree, statement, output });
|
||||||
|
}
|
||||||
|
} else if (ast.builtinCallParams(tree, node, &buffer)) |params| {
|
||||||
|
if (params.len < 1) return;
|
||||||
|
|
||||||
|
const call_name = Ast.tokenSlice(tree, main_tokens[node]);
|
||||||
|
|
||||||
|
if (node_tags[params[0]] != .string_literal) return error.Unsupported;
|
||||||
|
const first = extractString(Ast.tokenSlice(tree, main_tokens[params[0]]));
|
||||||
|
|
||||||
|
if (std.mem.eql(u8, call_name, "@cInclude")) {
|
||||||
|
try output.writer().print("#include <{s}>\n", .{first});
|
||||||
|
} else if (std.mem.eql(u8, call_name, "@cDefine")) {
|
||||||
|
if (params.len < 2) return;
|
||||||
|
|
||||||
|
var buffer2: [2]Ast.Node.Index = undefined;
|
||||||
|
const is_void = if (ast.blockStatements(tree, params[1], &buffer2)) |block| block.len == 0 else false;
|
||||||
|
|
||||||
|
if (is_void) {
|
||||||
|
try output.writer().print("#define {s}\n", .{first});
|
||||||
|
} else {
|
||||||
|
if (node_tags[params[1]] != .string_literal) return error.Unsupported;
|
||||||
|
const second = extractString(Ast.tokenSlice(tree, main_tokens[params[1]]));
|
||||||
|
try output.writer().print("#define {s} {s}\n", .{ first, second });
|
||||||
|
}
|
||||||
|
} else if (std.mem.eql(u8, call_name, "@cUndef")) {
|
||||||
|
try output.writer().print("#undefine {s}\n", .{first});
|
||||||
|
} else {
|
||||||
|
return error.Unsupported;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Takes C header source text, writes it to a temporary `cimport.h`, and runs
/// `zig translate-c` on it.
/// Returns the file path to the generated Zig file, or `null` on failure.
/// Caller owns returned memory.
pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []const []const u8, source: []const u8) error{OutOfMemory}!?[]const u8 {
    const file_path = try std.fs.path.join(allocator, &[_][]const u8{ config.global_cache_path.?, "cimport.h" });
    defer allocator.free(file_path);

    var file = std.fs.createFileAbsolute(file_path, .{}) catch |err| {
        std.log.warn("failed to create file '{s}': {}", .{ file_path, err });
        return null;
    };
    defer file.close();
    defer std.fs.deleteFileAbsolute(file_path) catch |err| {
        std.log.warn("failed to delete file '{s}': {}", .{ file_path, err });
    };

    file.writeAll(source) catch |err| {
        std.log.warn("failed to write to '{s}': {}", .{ file_path, err });
    };

    const base_include_dirs = blk: {
        const target_info = std.zig.system.NativeTargetInfo.detect(allocator, .{}) catch break :blk null;
        var native_paths = std.zig.system.NativePaths.detect(allocator, target_info) catch break :blk null;
        defer native_paths.deinit();

        break :blk native_paths.include_dirs.toOwnedSlice();
    };
    defer if (base_include_dirs) |dirs| {
        for (dirs) |path| {
            allocator.free(path);
        }
        allocator.free(dirs);
    };

    const base_args = &[_][]const u8{
        config.zig_exe_path.?,
        "translate-c",
        "--enable-cache",
        "--zig-lib-dir",
        config.zig_lib_path.?,
        "--cache-dir",
        config.global_cache_path.?,
    };

    // Capacity: base args, one "-I <dir>" pair per include dir, plus the trailing header path.
    const argc = base_args.len + 2 * (include_dirs.len + if (base_include_dirs) |dirs| dirs.len else 0) + 1;
    var argv = try std.ArrayListUnmanaged([]const u8).initCapacity(allocator, argc);
    defer argv.deinit(allocator);

    argv.appendSliceAssumeCapacity(base_args);

    if (base_include_dirs) |dirs| {
        for (dirs) |include_dir| {
            argv.appendAssumeCapacity("-I");
            argv.appendAssumeCapacity(include_dir);
        }
    }

    for (include_dirs) |include_dir| {
        argv.appendAssumeCapacity("-I");
        argv.appendAssumeCapacity(include_dir);
    }

    argv.appendAssumeCapacity(file_path);

    const result = std.ChildProcess.exec(.{
        .allocator = allocator,
        .argv = argv.items,
    }) catch |err| {
        std.log.err("failed to execute zig translate-c process, error: {}", .{err});
        return null;
    };

    defer allocator.free(result.stdout);
    defer allocator.free(result.stderr);

    return switch (result.term) {
        .Exited => |code| if (code == 0) {
            // With `--enable-cache`, translate-c reports the generated file path
            // on stdout; the first line is the path we return.
            return try allocator.dupe(u8, std.mem.sliceTo(result.stdout, '\n'));
        } else {
            // TODO convert failure to `textDocument/publishDiagnostics`
            std.log.err("zig translate-c process failed, code: {}, stderr: '{s}'", .{ code, result.stderr });
            return null;
        },
        else => {
            std.log.err("zig translate-c process terminated '{}'", .{result.term});
            return null;
        },
    };
}

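// A usage sketch (names are illustrative): `config` must have `zig_exe_path`,
// `zig_lib_path`, and `global_cache_path` set, since the `.?` unwraps above
// otherwise panic.
//
//     if (try translate(allocator, config, &[_][]const u8{}, "#include <stdio.h>")) |zig_path| {
//         defer allocator.free(zig_path);
//         // open/parse the translated file at `zig_path` here
//     }
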
fn extractString(str: []const u8) []const u8 {
    if (std.mem.startsWith(u8, str, "\"") and std.mem.endsWith(u8, str, "\"")) {
        return str[1 .. str.len - 1];
    } else {
        return str;
    }
}
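
// A minimal sanity check for `extractString`'s quote stripping; assumes `std`
// is in scope (imported at the top of this file).
test "extractString strips surrounding double quotes" {
    try std.testing.expectEqualStrings("GLFW/glfw3.h", extractString("\"GLFW/glfw3.h\""));
    try std.testing.expectEqualStrings("no quotes", extractString("no quotes"));
}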