Merge branch 'dev'

This commit is contained in:
Filipe Rodrigues 2025-08-15 01:41:47 +01:00
commit 7dd731049c
Signed by: zenithsiz
SSH Key Fingerprint: SHA256:Mb5ppb3Sh7IarBO/sBTXLHbYEOz37hJAlslLQPPAPaU
54 changed files with 3183 additions and 3226 deletions

7
.prettierrc Normal file
View File

@ -0,0 +1,7 @@
{
"trailingComma": "es5",
"useTabs": true,
"semi": true,
"singleQuote": true,
"singleAttributePerLine": true
}

13
.taplo.toml Normal file
View File

@ -0,0 +1,13 @@
[formatting]
indent_string = "\t"
align_entries = true
column_width = 120
[[rule]]
include = ["**/Cargo.toml"]
keys = ["dependencies", "workspace"]
[rule.formatting]
reorder_keys = true
reorder_arrays = true
reorder_inline_tables = true

47
.vscode/settings.json vendored
View File

@ -1,23 +1,28 @@
{
"cSpell.words": [
"Cmds",
"Cmpt",
"cmpts",
"dashmap",
"debouncer",
"filetime",
"indexmap",
"itertools",
"mapref",
"npath",
"oneshot",
"petgraph",
"PKGBUILD",
"rwlock",
"smallvec",
"thiserror",
"yeet",
"Zbuild"
],
"rust-analyzer.cargo.features": "all"
"cSpell.words": [
"Cmds",
"Cmpt",
"cmpts",
"dashmap",
"debouncer",
"filetime",
"indexmap",
"indicatif",
"inotify",
"itertools",
"mapref",
"metavar",
"npath",
"oneshot",
"petgraph",
"PKGBUILD",
"rwlock",
"smallvec",
"tempdir",
"thiserror",
"yeet",
"Zbuild",
"zutil"
],
"rust-analyzer.cargo.features": "all"
}

View File

@ -1,3 +1,15 @@
# 0.1.10
# Major
- Replaced yaml file with custom language `.zb`.
- Added a vscode extension for the custom `.zb` language.
- Added progress bar during building
- Added flags `--keep-going` and `--always-build`
# 0.1.9
# Major

881
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,33 +1,38 @@
# TODO: Add `LICENSE-APACHE` to `license-file` once that key supports multiple licenses
[package]
edition = "2021"
name = "zbuild"
edition = "2024"
name = "zbuild"
description = "Make-like build system"
license-file = "LICENSE-MIT"
version = "0.1.9"
repository = "https://github.com/zenithsiz/zbuild"
publish = ["filipejr"]
license = "MIT OR Apache-2.0"
version = "0.1.10"
repository = "https://github.com/zenithsiz/zbuild"
publish = ["filipejr"]
[dependencies]
anyhow = "1.0.86"
async-broadcast = "0.7.1"
clap = { version = "4.5.15", features = ["derive"] }
console-subscriber = { version = "0.4.0", optional = true }
dashmap = "6.0.1"
futures = "0.3.30"
indexmap = { version = "2.4.0", features = ["serde"] }
itertools = "0.13.0"
notify = "6.1.1"
notify-debouncer-full = "0.3.1"
pin-project = "1.1.5"
serde = { version = "1.0.205", features = ["derive"] }
serde_yaml = "0.9.34"
smallvec = { version = "1.13.2", features = ["may_dangle"] }
tokio = { version = "1.39.2", features = ["full"] }
tokio-stream = "0.1.15"
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
async-broadcast = "0.7.2"
clap = { features = ["derive"], version = "4.5.27" }
console-subscriber = { optional = true, version = "0.4.1" }
dashmap = "6.1.0"
futures = "0.3.31"
indexmap = "2.7.1"
indicatif = "0.17.11"
itertools = "0.14.0"
notify = "8.0.0"
notify-debouncer-full = "0.5.0"
pin-project = "1.1.8"
smallvec = { features = ["may_dangle"], version = "1.13.2" }
tokio = { features = ["full"], version = "1.43.0" }
tokio-stream = "0.1.17"
tracing = "0.1.41"
tracing-subscriber = { features = ["env-filter"], version = "0.3.19" }
unicode-ident = "1.0.16"
yoke = "0.7.5"
zutil-app-error = { git = "https://github.com/Zenithsiz/zutil", rev = "5363bba6ced162185a1eb5a132cce499bfc5d818" }
[dev-dependencies]
tempfile = "3.16.0"
tracing-test = { version = "0.2.5", features = ["no-env-filter"] }
[features]
@ -36,19 +41,12 @@ tokio-console = ["dep:console-subscriber"]
[lints]
# This project doesn't require unsafe code (outside of some modules)
rust.unsafe_code = "deny"
rust.unsafe_code = "deny"
rust.unsafe_op_in_unsafe_fn = "deny"
# Group lints
# Note: We warn on the full group list, and then `allow` the
# lints of each group that don't make sense for this project.
# This is so that when new lints are added we can immediatly start
# receiving *warnings*, and can then remove them if they're not
# relevant.
clippy.pedantic = { level = "warn", priority = -1 }
clippy.nursery = { level = "warn", priority = -1 }
clippy.restriction = { level = "warn", priority = -1 }
clippy.blanket_clippy_restriction_lints = "allow"
clippy.nursery = { level = "warn", priority = -1 }
# Prefer `expect` instead of `unwrap`
clippy.unwrap_used = "deny"
@ -58,109 +56,48 @@ clippy.expect_used = "allow"
rust.elided_lifetimes_in_paths = "deny"
# `Debug` / `Copy` should be implemented wherever possible
rust.missing_copy_implementations = "warn"
rust.missing_copy_implementations = "warn"
rust.missing_debug_implementations = "warn"
# Misc.
rust.noop_method_call = "warn"
rust.unused_results = "warn"
rust.noop_method_call = "warn"
rust.unused_results = "warn"
rust.explicit_outlives_requirements = "warn"
rust.fuzzy_provenance_casts = "deny"
rust.meta_variable_misuse = "warn"
rust.must_not_suspend = "warn"
rust.single_use_lifetimes = "warn"
rust.trivial_numeric_casts = "warn"
rust.unused_lifetimes = "warn"
rust.unused_macro_rules = "warn"
rust.variant_size_differences = "warn"
rust.unused_crate_dependencies = "warn"
rust.meta_variable_misuse = "warn"
rust.must_not_suspend = "warn"
rust.single_use_lifetimes = "warn"
rust.trivial_numeric_casts = "warn"
rust.unused_lifetimes = "warn"
rust.unused_macro_rules = "warn"
rust.variant_size_differences = "warn"
clippy.let_underscore_untyped = "warn"
# False positives
clippy.significant_drop_tightening = "allow"
# Can't easily `allow` / `expect` it.
# TODO: Issue seems to be with the feature `stmt_expr_attributes`?
clippy.arithmetic_side_effects = "allow"
# We don't need this kind of control over this application
clippy.pub_use = "allow"
clippy.question_mark_used = "allow"
clippy.integer_division = "allow"
clippy.exhaustive_enums = "allow"
clippy.exhaustive_structs = "allow"
clippy.impl_trait_in_params = "allow"
clippy.unreachable = "allow"
clippy.mem_forget = "allow"
clippy.shadow_same = "allow"
clippy.shadow_reuse = "allow"
clippy.shadow_unrelated = "allow" # TODO: Maybe check this one every once in a while? Pretty noisy though
clippy.min_ident_chars = "allow" # Useful for generics such as `f: impl FnOnce()`
clippy.single_call_fn = "allow" # It's still useful to separate blocks of code into functions
clippy.float_arithmetic = "allow"
clippy.struct_field_names = "allow"
clippy.iter_over_hash_type = "allow"
clippy.non_ascii_literal = "allow"
# We prefer the short version
clippy.pub_with_shorthand = "allow"
clippy.pub_without_shorthand = "warn"
# We prefer the semicolon inside
clippy.semicolon_inside_block = "allow"
clippy.semicolon_outside_block = "deny"
# Style
clippy.implicit_return = "allow"
clippy.multiple_inherent_impl = "allow"
clippy.pattern_type_mismatch = "allow"
clippy.match_bool = "allow"
clippy.single_match_else = "allow" # Note: `match` reads easier than `if / else`
clippy.option_if_let_else = "allow"
clippy.self_named_module_files = "allow"
clippy.items_after_statements = "allow"
clippy.module_name_repetitions = "allow"
clippy.module_inception = "allow"
clippy.implicit_return = "allow"
clippy.multiple_inherent_impl = "allow"
clippy.pattern_type_mismatch = "allow"
clippy.match_bool = "allow"
clippy.single_match_else = "allow" # Note: `match` reads easier than `if / else`
clippy.option_if_let_else = "allow"
clippy.self_named_module_files = "allow"
clippy.items_after_statements = "allow"
clippy.module_name_repetitions = "allow"
clippy.module_inception = "allow"
clippy.separated_literal_suffix = "allow"
clippy.ref_patterns = "allow" # Matching on a vale and adding `ref` is easier than matching on ref and de-referencing values within the body
clippy.ref_patterns = "allow" # Matching on a vale and adding `ref` is easier than matching on ref and de-referencing values within the body
# Performance of floats isn't paramount
clippy.suboptimal_flops = "allow"
# Some functions might return an error / be async in the future
clippy.unnecessary_wraps = "allow"
clippy.unused_async = "allow"
# We use proper error types when it matters what errors can be returned, else = "allow"
# such as when using `anyhow`, we just assume the caller won't check *what* error
# happened and instead just bubbles it up
# Callers don't care about which error happened.
clippy.missing_errors_doc = "allow"
# We don't expose certain entities that should be documented for internal use.
rustdoc.private_intra_doc_links = "allow"
# Too noisy with self-contained panics that the caller will never see.
clippy.missing_panics_doc = "allow"
# This is too prevalent on generic functions, which we don't want to ALWAYS be `Send`
clippy.future_not_send = "allow"
# TODO: Use `core` / `alloc` instead of `std` where possible?
clippy.std_instead_of_core = "allow"
clippy.std_instead_of_alloc = "allow"
# Single-letter generics / lifetimes are fine when there isn't a specific meaning to the generic
clippy.single_char_lifetime_names = "allow"
# We don't need to annotate `#[inline]` to every single function
# TODO: Check if it might be required in some hot functions?
clippy.missing_inline_in_public_items = "allow"
# For most trait impls, the default is fine.
# TODO: Turn this off every once in a while and check if there's any
# performance improvements from implementing default functions?
clippy.missing_trait_methods = "allow"
# We only panic when it's an unrecoverable error
clippy.unwrap_in_result = "allow"
clippy.panic_in_result_fn = "allow"
# Sometimes small structs defined inline don't need documentation
clippy.missing_docs_in_private_items = "allow"
# We want to format paths with quotes
clippy.unnecessary_debug_formatting = "allow"

258
GUIDE.md
View File

@ -1,258 +0,0 @@
# Usage guide
> **Important**:
>
> See the [Unimplemented features](#Unimplemented-features) section for possible missing features described here
>
> Also see the [Limitations](#Limitations) section for details on why a certain feature may not be working properly yet.
## Core concepts
The 2 main core concepts used in zbuild are:
- Rules
- Items
Rules are objects which describe an action that can be taken to generate files.
Items can be files, rules, or even just some condition[^1]. Generally they are split into output items and dependency items.
Output items are those that a rule creates. These _cannot_ be other rules, as you cannot "create" a rule. Generally they are files created by the execution of the rule.
Dependency items are more general, in that they can be any kind of dependency: A file, a rule execution or conditions.
See [Rule Outputs](#rule-outputs) and [Rule Dependencies](#rule-dependencies) for more details on items.
See the [Rule](#rule) section about details on rules.
[^1]: Currently unimplemented
## Rules
Rules are the core concept of using zbuild, they are your way to explain to zbuild which actions do what.
By specifying an output and dependencies, whenever zbuild is asked for a target to build, it can analyze all rules and determine a way to generate the file required.
Rules have the following sections:
- `alias`: Aliases
- `out`: Outputs
- `deps`: Dependencies
- `exec`: Execution
Each one is optional, and may be skipped if irrelevant to the rule.
### Rule Aliases
(Also see: [Aliases](#aliases) section)
Rule aliases are scoped so that they are only resolved inside the body of the rule they're defined in. They _may_ shadow global aliases and will take priority.
### Rule Outputs
(Also see: [Dependency Files](#dependency-files) and sections)
Output items are those that you guarantee to `zbuild` that will exist after the execution of the rule.
They may be either regular files, or dependency files.
If they are dependency files, they will be considered dependencies,
_if they exist_ when the rule dependencies are checked.
You **must** guarantee that, if the rule is executed successfully all files actually exist.
The modification date of the rule output is the oldest file among all of the outputs.
Output items may include patterns, specified as `^(<pattern-name>)` inside any string. When checking if a rule can create an output, the pattern will be tested to see if any string substituted into the pattern outputs the item. If so, this string is given the string value for the whole rule.
These patterns may be used as dependencies, but only if the output items can resolve them.
Patterns may also include operators. Specified as `^(<pattern-name>::<op1>::<op2>)`. The following operators are currently supported:
1. `non_empty`
Ensures the pattern cannot match an empty string
You may specify them as an array of the following:
1. Regular file
Regular file are specified simply by a string with their path
2. Dependency files
Dependency files are specified with `deps_file: <file>` where `<file>` is the path of the file
### Rule Dependencies
(Also see: [Dependency Files](#dependency-files) )
Dependency items are those that you require to exist before the rule can be executed.
These may be either:
- (Static or Optional) Regular files
- (Static or Optional) Dependency files
- Rules
Static items are those that only need to exist in order to be considered up to date. They are useful for depending on, e.g. directories.
Regular files are just that, regular files, their modification date will be used to test if the output needs to be rebuilt.
Dependency files are regular files which are also checked for make dependencies after being considered up to date.
Optional file are regular files that do not result in an error if they do not exist.
Rule dependencies simply state that a rule must be run before them.
Even if two rules required the same rule in their dependencies, it will only be run once (per pattern). Rules are always considered out of date and will always be executed.
You may specify them as an array of the following:
1. Static item
Static items are specified with `static: <item>` where `<item>` is either a regular file or a dependency item
2. Optional file
Optional files are specified with `opt: <item>` where `<item>` is either a regular file or a dependency item
3. Regular file
Regular file are specified simply by a string with their path
4. Dependency files
Dependency files are specified with `deps_file: <file>` where `<file>` is the path of the file
5. Rules
Rules are specified with `{ rule: <rule-name>, pats: { <pat1>: <value1>, <pat2>: <value2 >} }`.
Where `<rule-name>` is the rule name, and `patX` is a pattern for the rule, with `valueX` as it's value.
For rules without patterns, you may simply use `rule: <rule-name>`.
### Rule execution
The execution of a rule consists in running a series of specified commands.
It should be noted that all commands are _not_ run in a shell. If you require a shell you may spawn it yourself with a command.
Each command is executed sequentially given it's order's rule.
However, it may be run parallel to other rules (or even the same rule with a different pattern).
You may also define a working directory for a command to be executed in. You may only do this at the granularly of the rule, not per command however.
A rule is executed whenever it becomes a target. This happens, for e.g. when you specify a rule name on the command line, or when zbuild determines the output files are out of date relative to the dependencies.
When specifying a rule as a dependency, it's output files will be used for checking if out of date. If the rule has no output files, the instant it finished executing will be used.
You may specify the execution as either:
1. Short form
In the short form you simply specify all commands to run in an array.
You may not specify any options (such as the working directory) using this form
```yaml
exec:
- [bash, ...]
- [cp, ...]
- ...
```
2. Full form
The full form allow you to fully specify everything, but you must put the arguments of each command within an inner `args` key.
```yaml
exec:
- cwd: "..."
args: [bash, ...]
- [cp, ...]
- ...
```
Each argument may also be a command. The `stdout` of the sub-command will be passed as the argument to the parent command. Supports the following versions:
1. Short form
Similarly to top-level exec, you may just specify an array
```yaml
exec:
- - cat
- pwd: ".."
args: [find, ".", -iname, "myfile.txt"]
```
2. Long form
```yaml
exec:
- - cat
- strip_on_fail: false
cmd:
pwd: ".."
args: [find, ".", -iname, "myfile.txt"]
```
3. Special case for `strip_on_fail`
```yaml
exec:
- - cat
- strip_on_fail:
pwd: ".."
args: [find, ".", -iname, "myfile.txt"]
```
`strip_on_fail` will strip the argument if the command within it fails. This is useful for optional arguments.
The example uses the full syntax for the inner command, but you can use the short syntax. For example the following works:
```yaml
exec:
- - cat
- [find, ".", -iname, "myfile.txt"]
```
## Aliases
You may define aliases (either global or rule-scoped) which give a name to a value.
They are typically used to reduce the repetition of a rule, or abstract a certain value (such as an output directory) behind a variable, so it may be more easily changed.
You may instantiate an alias using `$(<alias>)` where `<alias>` is your alias' name.
Aliases may also include patterns. When matching rule patterns, all aliases are expanded and only patterns remain, if they contain patterns.
Aliases may also include operators. Specified as `$(<alias>::<op1>::<op2>)`. The following operators are currently supported:
1. `dir_name`
Resolves the alias into a path, then returns the parent path (without a trailing `/`).
## Dependency files
zbuild has limited support for make dependency files, typically generated by `gcc -MF <file>` or other tools.
zbuild can read these files both as output dependencies for a rule or as input dependencies.
For most use cases you will likely have them as output dependencies, but having them as input dependencies has a huge advantage if the dependencies of a file (file-a) depend on another (file-b) and you don't want to hard-code theses dependencies onto the zbuild manifest.
You can instead create a program that reads file-b, outputs a dependency file, then specify that file as a dependency of file-a, and zbuild will ensure that all dependencies are generated.
## Limitations
Dependency files may currently only specify a single dependency, which must be:
1. If the rule has any outputs, any of the outputs
2. If the rule has no dependencies, the name of the rule
## Unimplemented features
Currently conditions are not implemented in any way, neither checking or depending on then
Alias operators cannot be used if the value has any patterns when used in the rule output section. This would complicate pattern matching severely, so it has not been implemented yet.
Glob dependencies currently don't exist.

View File

@ -1,6 +1,6 @@
# Maintainer: Filipe Rodrigues <filipejacintorodrigues1@gmail.com>
pkgname=zbuild
pkgver=0.1.9
pkgver=0.1.10
pkgrel=1
pkgdesc="A make-like generic build system "
arch=('x86_64')

View File

@ -4,15 +4,15 @@ zbuild is a modern makefile-like build automation tool.
# Guide
Zbuild uses a yaml file to specify all rules. It will look for for the nearest `zbuild.yaml` file in the current or parent directories.
Zbuild uses a file to specify all rules. It will look for for the nearest `zbuild.zb` file in the current or parent directories.
See the [Guide](./GUIDE.md) for a proper introduction to zbuild.
<!-- TODO: Re-create the guide and link it here. -->
# Examples
See the `examples/` directory for examples.
In particular the `examples/simple_c/zbuild.yaml` is a fully documented example that helps you understand how zbuild works.
In particular the `examples/simple_c/zbuild.zb` is a fully documented example that helps you understand how zbuild works.
# Watching

View File

@ -1,61 +0,0 @@
default: [out/0.out]
rules:
0:
out: [out/0^(name).out]
deps: [out/1^(name)a.out, out/1^(name)b.out, out/1^(name)c.out]
exec:
- [touch, out/0^(name).out]
1:
out: [out/1^(name).out]
deps: [out/2^(name)a.out, out/2^(name)b.out, out/2^(name)c.out]
exec:
- [touch, out/1^(name).out]
2:
out: [out/2^(name).out]
deps: [out/3^(name)a.out, out/3^(name)b.out, out/3^(name)c.out]
exec:
- [touch, out/2^(name).out]
3:
out: [out/3^(name).out]
deps: [out/4^(name)a.out, out/4^(name)b.out, out/4^(name)c.out]
exec:
- [touch, out/3^(name).out]
4:
out: [out/4^(name).out]
deps: [out/5^(name)a.out, out/5^(name)b.out, out/5^(name)c.out]
exec:
- [touch, out/4^(name).out]
5:
out: [out/5^(name).out]
deps: [out/6^(name)a.out, out/6^(name)b.out, out/6^(name)c.out]
exec:
- [touch, out/5^(name).out]
6:
out: [out/6^(name).out]
deps: [out/7^(name)a.out, out/7^(name)b.out, out/7^(name)c.out]
exec:
- [touch, out/6^(name).out]
7:
out: [out/7^(name).out]
deps: [out/8^(name)a.out, out/8^(name)b.out, out/8^(name)c.out]
exec:
- [touch, out/7^(name).out]
8:
out: [out/8^(name).out]
deps: [static: out/]
exec:
- [touch, out/8^(name).out]
out:
out: ["out/"]
exec:
- [mkdir, -p, out/]

113
examples/deep/zbuild.zb Normal file
View File

@ -0,0 +1,113 @@
default "out/0.out";
rule r0 {
pat name;
out "out/0{name}.out";
dep "out/1{name}a.out";
dep "out/1{name}b.out";
dep "out/1{name}c.out";
exec "touch" "out/0{name}.out";
}
rule r1 {
pat name;
out "out/1{name}.out";
dep "out/2{name}a.out";
dep "out/2{name}b.out";
dep "out/2{name}c.out";
exec "touch" "out/1{name}.out";
}
rule r2 {
pat name;
out "out/2{name}.out";
dep "out/3{name}a.out";
dep "out/3{name}b.out";
dep "out/3{name}c.out";
exec "touch" "out/2{name}.out";
}
rule r3 {
pat name;
out "out/3{name}.out";
dep "out/4{name}a.out";
dep "out/4{name}b.out";
dep "out/4{name}c.out";
exec "touch" "out/3{name}.out";
}
rule r4 {
pat name;
out "out/4{name}.out";
dep "out/5{name}a.out";
dep "out/5{name}b.out";
dep "out/5{name}c.out";
exec "touch" "out/4{name}.out";
}
rule r5 {
pat name;
out "out/5{name}.out";
dep "out/6{name}a.out";
dep "out/6{name}b.out";
dep "out/6{name}c.out";
exec "touch" "out/5{name}.out";
}
rule r6 {
pat name;
out "out/6{name}.out";
dep "out/7{name}a.out";
dep "out/7{name}b.out";
dep "out/7{name}c.out";
exec "touch" "out/6{name}.out";
}
rule r7 {
pat name;
out "out/7{name}.out";
dep "out/8{name}a.out";
dep "out/8{name}b.out";
dep "out/8{name}c.out";
exec "touch" "out/7{name}.out";
}
rule r8 {
pat name;
out "out/8{name}.out";
dep static "out/";
exec "touch" "out/8{name}.out";
}
rule out {
out "out/";
exec "mkdir" "-p" "out/";
}

0
examples/empty/zbuild.zb Normal file
View File

View File

@ -0,0 +1,10 @@
default "a.out";
rule a {
out "a.out";
dep "b1.out";
dep "b2.out";
exec "touch" "a.out";
}

View File

@ -1,14 +0,0 @@
default: [a.out]
rules:
a:
out: [a.out]
deps: [b.out]
exec:
- [touch, a.out]
b:
out: [b.out]
deps: [a.out]
exec:
- [touch, b.out]

View File

@ -0,0 +1,17 @@
default "a.out";
rule a {
out "a.out";
dep "b.out";
exec "touch" "a.out";
}
rule b {
out "b.out";
dep "a.out";
exec "touch" "b.out";
}

1
examples/failures/timeout/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
*.out

View File

@ -0,0 +1,31 @@
default "a.out";
rule a {
out "a.out";
dep "b.out";
dep "c1.out";
exec "touch" "a.out";
}
rule b {
out "b.out";
exec "sleep" "0.1";
exec "false";
}
rule c1 {
out "c1.out";
dep "c2.out";
exec "touch" "c1.out";
}
rule c2 {
out "c2.out";
exec "sleep" "0.2";
exec "touch" "c2.out";
}

View File

@ -1,14 +0,0 @@
default: [a.out]
rules:
a:
out: [a.out]
deps: [b1.out, b2.out]
exec:
- [touch, a.out]
b:
out: [b1.out, b2.out]
exec:
- [touch, b1.out]
- [touch, b2.out]

View File

@ -0,0 +1,18 @@
default "a.out";
rule a {
out "a.out";
dep "b1.out";
dep "b2.out";
exec "touch" "a.out";
}
rule b {
out "b1.out";
out "b2.out";
exec "touch" "b1.out";
exec "touch" "b2.out";
}

View File

@ -1,54 +0,0 @@
# Global aliases
# These are aliases that may be used in the whole manifest.
# You can reference them by using `$(<name>)` at any point.
alias:
build_dir: build
src_dir: src
# Default targets
# These are the targets (i.e. files / rules) that will be built
# when no command line arguments are given.
default:
- $(build_dir)/my_proj.out
# Rules
# zbuild will examine these to figure out how to build any target
rules:
# Each rule has a unique name, used only for it's identification
compile_o:
# Rule aliases
# Rules may contain scoped aliases, specific to the rule.
alias:
my_alias: ...
# Output items
#
# `^(...)` can be used on the output items to create a "pattern".
# When finding how to build a target, each rule's output will be tested against
# the target. For example, if `build/a.o` is being checked, `^(name)` will become
# `a`. This can then be used on the rest of the rule, such as the dependencies.
out: [$(build_dir)/^(name).o]
# Dependencies
# These are the items that your rule requires.
# There are several types of dependencies, see the guide for more detail
deps: [$(src_dir)/^(name).c]
# Execution
#
# Specifies an array of commands to execute to build this rule. Each command is an array of arguments. It will not be passed to a shell, but instead be executed as-is
exec:
- [mkdir, -p, $(build_dir)]
- [gcc, $(src_dir)/^(name).c, -c, -o, $(build_dir)/^(name).o]
compile_out:
out: [$(build_dir)/my_proj.out]
deps: [$(build_dir)/a.o, $(build_dir)/b.o]
exec:
- [gcc, $(build_dir)/a.o, $(build_dir)/b.o, -o, $(build_dir)/my_proj.out]
# Rules don't need outputs & dependencies, they can simply be executables
clean:
exec:
- [rm, -rf, $(build_dir)]

View File

@ -0,0 +1,60 @@
# Global aliases
# These are aliases that may be used in the whole manifest.
# You can reference them by using `{<name>}` at any point.
alias build_dir = "build";
alias src_dir = "src";
# Default targets
# These are the targets (i.e. files / rules) that will be built
# when no command line arguments are given.
default "{build_dir}/my_proj.out";
# Rules
# zbuild will examine these to figure out how to build any target
# Each rule has a unique name, used only for it's identification
rule compile_o {
# Rule aliases
# Rules may contain scoped aliases, specific to the rule.
alias my_alias = "...";
# Patterns
# Patterns can be declared inside or outside of rules.
#
# Patterns are wildcards that will be matched against when
# attempting to figure out which rule can build which target.
#
# See the output items for an example usage.
pat name;
# Output items
# These are the items that your rule will create.
#
# When finding how to build a target, each rule's output will be tested against
# the target. For example, if `build/a.o` is being checked, `{name}` will become
# `a`. This can then be used on the rest of the rule, such as the dependencies.
out "{build_dir}/{name}.o";
# Dependencies
# These are the items that your rule requires.
# There are several types of dependencies, see the guide for more detail
dep "{src_dir}/{name}.c";
# Execution
#
# Specifies an array of commands to execute to build this rule. Each command is an array of arguments. It will not be passed to a shell, but instead be executed as-is
exec "mkdir" "-p" build_dir;
exec "gcc" "{src_dir}/{name}.c" "-c" "-o" "{build_dir}/{name}.o";
}
rule compile_out {
out "{build_dir}/my_proj.out";
dep "{build_dir}/a.o";
dep "{build_dir}/b.o";
exec "gcc" "{build_dir}/a.o" "{build_dir}/b.o" "-o" "{build_dir}/my_proj.out";
}
# Rules don't need outputs & dependencies, they can simply be executables
rule clean {
exec "rm" "-rf" build_dir;
}

8
install-extension.sh Executable file
View File

@ -0,0 +1,8 @@
#!/bin/env bash
rsync \
--delete \
--ignore-existing \
--recursive \
vscode-extension/ \
~/.vscode/extensions/zenithsiz.zbuild-0.1.0

View File

@ -1,56 +1,56 @@
# We're fine with unstable features
unstable_features = true
binop_separator = "Back"
blank_lines_lower_bound = 0
blank_lines_upper_bound = 2
brace_style = "SameLineWhere"
combine_control_expr = true
condense_wildcard_suffixes = true
control_brace_style = "AlwaysSameLine"
empty_item_single_line = true
binop_separator = "Back"
blank_lines_lower_bound = 0
blank_lines_upper_bound = 2
brace_style = "SameLineWhere"
combine_control_expr = true
condense_wildcard_suffixes = true
control_brace_style = "AlwaysSameLine"
empty_item_single_line = true
enum_discrim_align_threshold = 100
error_on_line_overflow = false
error_on_unformatted = false
fn_params_layout = "Tall"
fn_single_line = false
force_explicit_abi = true
force_multiline_blocks = false
format_code_in_doc_comments = false
format_macro_bodies = true
format_macro_matchers = true
format_strings = true
group_imports = "Preserve"
hard_tabs = true
hex_literal_case = "Lower"
imports_granularity = "One"
imports_indent = "Block"
imports_layout = "HorizontalVertical"
indent_style = "Block"
inline_attribute_width = 0
match_arm_blocks = false
match_arm_leading_pipes = "Never"
match_block_trailing_comma = true
max_width = 120
merge_derives = false
newline_style = "Unix"
normalize_comments = false
normalize_doc_attributes = false
overflow_delimited_expr = true
remove_nested_parens = true
reorder_impl_items = true
reorder_imports = true
reorder_modules = true
space_after_colon = true
space_before_colon = false
spaces_around_ranges = false
error_on_line_overflow = false
error_on_unformatted = false
fn_params_layout = "Tall"
fn_single_line = false
force_explicit_abi = true
force_multiline_blocks = false
format_code_in_doc_comments = false
format_macro_bodies = true
format_macro_matchers = true
format_strings = true
group_imports = "Preserve"
hard_tabs = true
hex_literal_case = "Lower"
imports_granularity = "One"
imports_indent = "Block"
imports_layout = "HorizontalVertical"
indent_style = "Block"
inline_attribute_width = 0
match_arm_blocks = false
match_arm_leading_pipes = "Never"
match_block_trailing_comma = true
max_width = 120
merge_derives = false
newline_style = "Unix"
normalize_comments = false
normalize_doc_attributes = false
overflow_delimited_expr = true
remove_nested_parens = true
reorder_impl_items = true
reorder_imports = true
reorder_modules = true
space_after_colon = true
space_before_colon = false
spaces_around_ranges = false
struct_field_align_threshold = 20
struct_lit_single_line = true
trailing_comma = "Vertical"
trailing_semicolon = true
type_punctuation_density = "Wide"
use_field_init_shorthand = true
use_small_heuristics = "Default"
use_try_shorthand = true
where_single_line = false
wrap_comments = false
struct_lit_single_line = true
trailing_comma = "Vertical"
trailing_semicolon = true
type_punctuation_density = "Wide"
use_field_init_shorthand = true
use_small_heuristics = "Default"
use_try_shorthand = true
where_single_line = false
wrap_comments = false

View File

@ -15,6 +15,7 @@ use std::path::PathBuf;
#[derive(Debug)]
#[derive(clap::Parser)]
#[clap(author, version, about)]
#[expect(clippy::struct_excessive_bools, reason = "It's normal to have a lot of switches")]
pub struct Args {
/// All targets to build.
///
@ -43,6 +44,21 @@ pub struct Args {
#[clap(long = "ignore-missing", short = 'i')]
pub ignore_missing: bool,
/// Keeps building files even if an error has occurred.
///
/// Normally, whenever an error occurs, further rules are forbidden
/// to execute, although currently executing rules continue running.
///
/// This makes it so that whenever an error occurs,
/// we continue searching and executing rules until there is nothing
/// else we can do
#[clap(long = "keep-going")]
pub keep_going: bool,
/// Always build rules, even if their outputs are up to date
#[clap(long = "always-build")]
pub always_build: bool,
/// Watch for file changes and rebuild any necessary targets.
///
/// WARNING: If the log file is situated in the same directory as any watched
@ -62,3 +78,20 @@ pub struct Args {
#[clap(long = "log-file")]
pub log_file: Option<PathBuf>,
}
#[expect(clippy::derivable_impls, reason = "We want to be explicit with the defaults")]
impl Default for Args {
fn default() -> Self {
Self {
targets: vec![],
zbuild_path: None,
jobs: None,
ignore_missing: false,
keep_going: false,
always_build: false,
watch: false,
watcher_debouncer_timeout_ms: None,
log_file: None,
}
}
}

1090
src/ast.rs

File diff suppressed because it is too large Load Diff

View File

@ -11,26 +11,22 @@ pub use self::{lock::BuildResult, reason::BuildReason};
use {
self::lock::{BuildLock, BuildLockDepGuard},
crate::{
error::ResultMultiple,
expand,
rules::{Command, DepItem, Expr, ExprTree, OutItem, Rule, Target},
util::{self, ArcStr},
AppError,
Expander,
Rules,
error::{self, AppErrorData},
expand,
rules::{Command, DepItem, Expr, ExprTree, OutItem, Rule, Target},
util::{self, ArcStr},
},
anyhow::Context,
dashmap::DashMap,
futures::{stream::FuturesUnordered, StreamExt},
indexmap::IndexMap,
futures::{StreamExt, TryStreamExt, stream::FuturesUnordered},
indicatif::ProgressBar,
itertools::Itertools,
std::{
collections::{BTreeMap, HashMap},
future::Future,
sync::Arc,
time::SystemTime,
},
tokio::{fs, process, sync::Semaphore, task},
smallvec::SmallVec,
std::{collections::HashMap, fmt, future::Future, process::Stdio, sync::Arc, time::SystemTime},
tokio::{fs, io::AsyncReadExt, process, sync::Semaphore, task},
zutil_app_error::{AllErrs, Context, app_error},
};
/// Event
@ -53,7 +49,7 @@ pub struct TargetRule {
name: ArcStr,
/// Patterns
pats: Arc<BTreeMap<ArcStr, ArcStr>>,
pats: SmallVec<[(ArcStr, ArcStr); 1]>,
}
/// Builder
@ -87,6 +83,12 @@ pub struct Builder {
/// If the execution semaphore should be closed on the first error
stop_builds_on_first_err: bool,
/// Whether we should always build rules, even if their outputs are up to date
always_build: bool,
/// Progress bar
progress_bar: Option<ProgressBar>,
}
impl Builder {
@ -96,6 +98,8 @@ impl Builder {
rules: Rules,
expander: Expander,
stop_builds_on_first_err: bool,
always_build: bool,
progress_bar: Option<ProgressBar>,
) -> Result<Self, AppError> {
let (event_tx, event_rx) = async_broadcast::broadcast(jobs);
let event_rx = event_rx.deactivate();
@ -110,22 +114,21 @@ impl Builder {
let output_file = match output {
OutItem::File { file: output_file, .. } => output_file,
};
let expand_visitor = expand::Visitor::from_aliases([&rule.aliases, &rules.aliases])
.with_default_pat(expand::FlowControl::Keep);
let expand_visitor =
expand::Visitor::new([&rule.aliases, &rules.aliases], [&rule.pats, &rules.pats], []);
let output_file = expander.expand_expr(output_file, &expand_visitor)?;
// Then try to insert it
if let Some(prev_rule_name) = rule_output_tree
.insert(&output_file, rule_name.clone())
.context("Unable to add rule output to tree")
.map_err(AppError::Other)?
.insert(&output_file, rule_name.clone(), &[&rule.pats, &rules.pats])
.context("Unable to add rule output to tree")?
{
return Err(AppError::Other(anyhow::anyhow!(
return Err(app_error!(
"Multiple rules match the same output file: {output_file}\n first rule: {prev_rule_name}\n \
second rule: {rule_name}"
)));
};
));
}
}
}
@ -138,11 +141,13 @@ impl Builder {
rule_output_tree,
exec_semaphore: Semaphore::new(jobs),
stop_builds_on_first_err,
always_build,
progress_bar,
})
}
/// Returns all build results
pub async fn build_results(&self) -> IndexMap<TargetRule, Option<Result<BuildResult, ()>>> {
pub async fn build_results(&self) -> HashMap<TargetRule, Option<Result<BuildResult, ()>>> {
self.rules_lock
.iter()
.map(|rule_lock| async move { (rule_lock.key().clone(), rule_lock.value().res().await) })
@ -177,10 +182,7 @@ impl Builder {
Target::File { ref file, .. } => match self.rule_output_tree.find(file) {
Some((name, pats)) => {
tracing::trace!(%target, %name, "Found target rule");
TargetRule {
name,
pats: Arc::new(pats),
}
TargetRule { name, pats }
},
None => return Ok(None),
@ -189,7 +191,7 @@ impl Builder {
// If we got a rule name with patterns, find it and replace all patterns
Target::Rule { ref rule, ref pats } => TargetRule {
name: rule.clone(),
pats: Arc::clone(pats),
pats: pats.clone(),
},
};
@ -198,14 +200,15 @@ impl Builder {
.rules
.rules
.get(&*target_rule.name)
.ok_or_else(|| AppError::UnknownRule {
rule_name: (*target_rule.name).to_owned(),
})?;
let expand_visitor = expand::Visitor::new([&rule.aliases, &self.rules.aliases], [&target_rule.pats]);
.with_context(|| format!("Unknown rule {:?}", target_rule.name))?;
let expand_visitor =
expand::Visitor::new([&rule.aliases, &self.rules.aliases], [&rule.pats, &self.rules.pats], [
target_rule.pats.clone(),
]);
let rule = self
.expander
.expand_rule(rule, &expand_visitor)
.map_err(AppError::expand_rule(&*rule.name))?;
.with_context(|| format!("Unable to expand rule {:?}", rule.name))?;
Ok(Some((rule, target_rule)))
}
@ -239,34 +242,18 @@ impl Builder {
reason: BuildReason,
) -> Result<(BuildResult, Option<BuildLockDepGuard>), AppError> {
// Expand the target
let expand_visitor = expand::Visitor::from_aliases([&self.rules.aliases]);
let expand_visitor = expand::Visitor::new([&self.rules.aliases], [&self.rules.pats], []);
let target = self
.expander
.expand_target(target, &expand_visitor)
.map_err(AppError::expand_target(target))?;
.with_context(|| format!("Unable to expand target {target}"))?;
// Then build
self.build(&target, ignore_missing, reason).await
}
/// Builds a target
// TODO: Remove this wrapper function once the compiler behaves.
#[expect(
clippy::manual_async_fn,
reason = "For some reason, without this wrapper, the compiler
can't see that `build_inner`'s future is `Send`"
)]
pub fn build<'a>(
self: &'a Arc<Self>,
target: &'a Target<ArcStr>,
ignore_missing: bool,
reason: BuildReason,
) -> impl Future<Output = Result<(BuildResult, Option<BuildLockDepGuard>), AppError>> + Send + 'a {
async move { self.build_inner(target, ignore_missing, reason).await }
}
/// Inner function for [`Builder::build`]
pub async fn build_inner<'a>(
pub async fn build<'a>(
self: &'a Arc<Self>,
target: &'a Target<ArcStr>,
ignore_missing: bool,
@ -286,7 +273,9 @@ impl Builder {
match *target {
Target::File { ref file, .. } => match fs::symlink_metadata(&**file).await {
Ok(metadata) => {
let build_time = metadata.modified().map_err(AppError::get_file_modified_time(&**file))?;
let build_time = metadata
.modified()
.with_context(|| format!("Unable to get file modified time: {file:?}"))?;
tracing::trace!(%target, ?build_time, "Found target file");
return Ok((
BuildResult {
@ -297,7 +286,7 @@ impl Builder {
));
},
Err(_) if ignore_missing => {
tracing::debug!(?file, "Ignoring missing target file");
tracing::info!(?file, "Ignoring missing target file");
return Ok((
BuildResult {
// Note: We simply pretend the file was built right now
@ -309,10 +298,8 @@ impl Builder {
));
},
Err(err) =>
return Err(AppError::MissingFile {
file_path: (**file).into(),
source: err,
}),
do yeet AppError::new(&err)
.context(format!("Missing file {file:?} and no rule to build it found")),
},
// Note: If `target_rule` returns `Err` if this was a rule, so we can never reach here
Target::Rule { .. } => unreachable!(),
@ -322,7 +309,7 @@ impl Builder {
// Get the built lock, or create it
let build_lock = self
.rules_lock
.entry(target_rule)
.entry(target_rule.clone())
.or_insert_with(BuildLock::new)
.clone();
@ -331,12 +318,9 @@ impl Builder {
// First check if we're done with a dependency lock
let build_guard = build_lock.lock_dep().await;
if let Some(res) = build_guard.res() {
return res
.map(|res| (res, Some(build_guard)))
.map_err(|()| AppError::BuildTarget {
source: None,
target: target.to_string(),
});
return res.map(|res| (res, Some(build_guard))).map_err(|()| {
AppError::msg_with_data("Unable to build target {target}", AppErrorData { should_ignore: true })
});
}
// Otherwise, try to upgrade to a build lock
@ -347,9 +331,8 @@ impl Builder {
// the writer should have priority, so this shouldn't result in much
// waiting for them.
let res = build_guard.try_upgrade_into_build().await;
match res {
Ok(build_guard) => break build_guard,
Err(_) => continue,
if let Ok(build_guard) = res {
break build_guard;
}
};
@ -358,11 +341,30 @@ impl Builder {
let res = task::spawn({
let this = Arc::clone(self);
let target = Arc::clone(&target);
async move { this.build_unchecked(&target, &rule, ignore_missing, reason).await }
// TODO: Remove this wrapper function once the compiler behaves.
#[expect(
clippy::manual_async_fn,
reason = "For some reason, without this wrapper, the compiler can't see that `build_inner`'s future \
is `Send` without explicitly annotating it in the return type"
)]
fn build_inner(
this: Arc<Builder>,
target: Arc<Target<ArcStr>>,
rule: Rule<ArcStr>,
ignore_missing: bool,
reason: BuildReason,
target_rule: TargetRule,
) -> impl Future<Output = Result<BuildResult, AppError>> + Send {
async move {
this.build_unchecked(&target, &rule, ignore_missing, reason, &target_rule)
.await
}
}
build_inner(this, target, rule, ignore_missing, reason, target_rule)
})
.await
.context("Unable to join task")
.map_err(AppError::Other)?;
.context("Unable to join task")?;
match res {
Ok(res) => {
@ -373,10 +375,10 @@ impl Builder {
Err(err) => {
// If we should, close the exec semaphore to ensure we exit as early as possible
// Note: This check is racy, but it's fine to print this warning multiple times. We just don't want
// to spam the user, since all further errors will likely caused by `AppError::ExecSemaphoreClosed`,
// to spam the user, since all further errors will likely caused by the semaphore closing,
// while the first few are the useful ones with the reason why the execution semaphore is being closed.
if self.stop_builds_on_first_err && !self.exec_semaphore.is_closed() {
tracing::debug!(err=%err.pretty(), "Stopping all future builds due to failure of target {target}");
tracing::debug!(err=%error::pretty(&err), "Stopping all future builds due to failure of target {target}");
self.exec_semaphore.close();
}
@ -387,14 +389,77 @@ impl Builder {
}
/// Builds a target without checking if the target is already being built.
#[expect(clippy::too_many_lines, reason = "TODO: Split this function onto smaller ones")]
async fn build_unchecked(
self: &Arc<Self>,
target: &Target<ArcStr>,
rule: &Rule<ArcStr>,
ignore_missing: bool,
reason: BuildReason,
target_rule: &TargetRule,
) -> Result<BuildResult, AppError> {
if let Some(progress_bar) = &self.progress_bar {
progress_bar.inc_length(1);
}
// Build all dependencies
let deps = self.build_deps_unchecked(target, rule, ignore_missing, reason).await?;
let deps_last_build_time = deps
.iter()
.filter(|(dep_target, ..)| !dep_target.is_static())
.map(|(_, dep_res, _)| dep_res.build_time)
.max();
tracing::trace!(%target, ?rule.name, ?deps_last_build_time, ?deps, "Built target rule dependencies");
// Afterwards check the last time we've built the rule and compare it with
// the dependency build times.
let rule_last_build_time = self::rule_last_build_time(rule).await;
let needs_rebuilt = match (deps_last_build_time, &rule_last_build_time) {
// If any files were missing, or we had no outputs, build
(_, Err(_) | Ok(None)) => true,
// If no dependencies and all outputs exist, don't rebuild
(None, Ok(_)) => false,
// If we have dependencies and outputs, rebuild if the dependencies are
// newer than the outputs
(Some(deps_last_build_time), Ok(Some(rule_last_build_time))) =>
deps_last_build_time > *rule_last_build_time,
};
let needs_rebuilt = needs_rebuilt || self.always_build;
// Then rebuild, if needed
if needs_rebuilt {
tracing::trace!(%target, ?rule.name, ?deps_last_build_time, ?rule_last_build_time, "Rebuilding target rule");
self.rebuild_rule(rule, target_rule)
.await
.with_context(|| format!("Unable to build rule {:?}", rule.name))?;
}
// Then get the build time
// Note: If we don't have any outputs, just use the current time as the build time
let cur_build_time = self::rule_last_build_time(rule).await?.unwrap_or_else(SystemTime::now);
let res = BuildResult {
build_time: cur_build_time,
built: needs_rebuilt,
};
if let Some(progress_bar) = &self.progress_bar {
progress_bar.inc(1);
}
Ok(res)
}
/// Builds all dependencies of `target`
#[expect(clippy::too_many_lines, reason = "TODO: Split this function onto smaller ones")]
async fn build_deps_unchecked(
self: &Arc<Self>,
target: &Target<ArcStr>,
rule: &Rule<ArcStr>,
ignore_missing: bool,
reason: BuildReason,
) -> Result<Vec<(Target<ArcStr>, BuildResult, Option<BuildLockDepGuard>)>, AppError> {
/// Dependency
#[derive(Clone, Debug)]
enum Dep {
@ -409,10 +474,7 @@ impl Builder {
},
/// Rule
Rule {
name: ArcStr,
pats: Arc<BTreeMap<ArcStr, ArcStr>>,
},
Rule { name: ArcStr },
}
// Gather all normal dependencies
@ -434,19 +496,14 @@ impl Builder {
is_optional,
exists: util::fs_try_exists_symlink(&**file)
.await
.map_err(AppError::check_file_exists(&**file))?,
}),
DepItem::Rule { ref name, ref pats } => Ok(Dep::Rule {
name: name.clone(),
pats: Arc::clone(pats),
.with_context(|| format!("Unable to check if file exists {file:?}"))?,
}),
DepItem::Rule { ref name } => Ok(Dep::Rule { name: name.clone() }),
}
})
.collect::<FuturesUnordered<_>>()
.collect::<Vec<_>>()
.await
.into_iter()
.collect::<ResultMultiple<Vec<_>>>()?;
.collect::<AllErrs<Vec<_>, _>>()
.await?;
// And all output dependencies
#[expect(
@ -469,20 +526,17 @@ impl Builder {
is_optional: false,
exists: util::fs_try_exists_symlink(&**file)
.await
.map_err(AppError::check_file_exists(&**file))?,
.with_context(|| format!("Unable to check if file exists {file:?}"))?,
})),
_ => Ok(None),
}
})
.collect::<FuturesUnordered<_>>()
.filter_map(move |res| async move { res.transpose() })
.collect::<Vec<_>>()
.await
.into_iter()
.collect::<ResultMultiple<Vec<_>>>()?;
.collect::<AllErrs<Vec<_>, _>>()
.await?;
// Then build all dependencies, as well as any dependency files
// TODO: Don't collect like 3 times during this
let deps = util::chain!(normal_deps, out_deps)
.map(|dep| {
tracing::trace!(%target, ?rule.name, ?dep, "Found target rule dependency");
@ -504,10 +558,10 @@ impl Builder {
is_static,
}),
// If a rule, always build
Dep::Rule { ref name, ref pats } => Some(Target::Rule {
Dep::Rule { ref name } => Some(Target::Rule {
rule: name.clone(),
pats: Arc::clone(pats),
// TODO: Allow specifying the patterns here?
pats: SmallVec::new(),
}),
};
@ -521,7 +575,7 @@ impl Builder {
reason.with_target(target.clone()),
)
.await
.map_err(AppError::build_target(&dep_target))?;
.with_context(|| format!("Unable to build target {dep_target}"))?;
tracing::trace!(%target, ?rule.name, ?dep, ?res, "Built target rule dependency");
self.send_event(|| Event::TargetDepBuilt {
@ -537,7 +591,6 @@ impl Builder {
};
// If the dependency if a dependency deps file or an output deps file (and exists), build it's dependencies too
#[expect(clippy::wildcard_enum_match_arm, reason = "We only care about some variants")]
let dep_deps = match &dep {
// Non-optional we don't check if they exist, so that an error
// pops up if they don't.
@ -565,65 +618,24 @@ impl Builder {
} => self
.build_deps_file(target, file, rule, ignore_missing, reason)
.await
.map_err(AppError::build_deps_file(&**file))?,
_ => vec![],
.with_context(|| format!("Unable to build dependencies file {file:?}"))?,
Dep::File { .. } | Dep::Rule { .. } => vec![],
};
tracing::trace!(%target, ?rule.name, ?dep, ?dep_res, ?dep_deps, "Built target rule dependency dependencies");
let deps = util::chain!(dep_res, dep_deps.into_iter()).collect::<Vec<_>>();
tracing::trace!(%target, ?rule.name, ?dep, ?deps, "Built target rule dependency dependencies");
let deps = util::chain!(dep_res, dep_deps.into_iter());
Ok(deps)
Ok::<_, AppError>(deps)
}
})
.collect::<FuturesUnordered<_>>()
.collect::<Vec<_>>()
.await
.into_iter()
.collect::<ResultMultiple<Vec<_>>>()?
.into_iter()
.flatten()
.collect::<Vec<_>>();
.map_ok(|deps| deps.map(Ok))
.map_ok(futures::stream::iter)
.try_flatten()
.collect::<AllErrs<Vec<_>, _>>()
.await?;
let deps_last_build_time = deps
.iter()
.filter(|(dep_target, ..)| !dep_target.is_static())
.map(|(_, dep_res, _)| dep_res.build_time)
.max();
tracing::trace!(%target, ?rule.name, ?deps_last_build_time, ?deps, "Built target rule dependencies");
// Afterwards check the last time we've built the rule and compare it with
// the dependency build times.
let rule_last_build_time = self::rule_last_build_time(rule).await;
let needs_rebuilt = match (deps_last_build_time, &rule_last_build_time) {
// If any files were missing, or we had no outputs, build
(_, Err(_) | Ok(None)) => true,
// If no dependencies and all outputs exist, don't rebuild
(None, Ok(_)) => false,
// If we have dependencies and outputs, rebuild if the dependencies are
// newer than the outputs
(Some(deps_last_build_time), Ok(Some(rule_last_build_time))) =>
deps_last_build_time > *rule_last_build_time,
};
// Then rebuild, if needed
if needs_rebuilt {
tracing::trace!(%target, ?rule.name, ?deps_last_build_time, ?rule_last_build_time, "Rebuilding target rule");
self.rebuild_rule(rule)
.await
.map_err(AppError::build_rule(&*rule.name))?;
}
// Then get the build time
// Note: If we don't have any outputs, just use the current time as the build time
let cur_build_time = self::rule_last_build_time(rule).await?.unwrap_or_else(SystemTime::now);
let res = BuildResult {
build_time: cur_build_time,
built: needs_rebuilt,
};
Ok(res)
Ok(deps)
}
/// Builds all dependencies of a `deps` file.
@ -645,13 +657,12 @@ impl Builder {
let matches_rule = |output: &str| match rule.output.is_empty() {
// If there were no outputs, make sure it matches the rule name
// TODO: Seems kinda weird for it to match the rule name, but not sure how else to check this here
true => (output == &*rule.name)
.then_some(())
.ok_or_else(|| AppError::DepFileMissingRuleName {
deps_file_path: deps_file.into(),
rule_name: rule.name.to_string(),
dep_output: output.to_owned(),
}),
true => (output == &*rule.name).then_some(()).ok_or_else(|| {
app_error!(
"Dependencies file {deps_file:?} is missing the rule name {:?}, found {output:?}",
rule.name
)
}),
// If there were any output, make sure the dependency file applies to one of them
false => rule
@ -661,10 +672,11 @@ impl Builder {
OutItem::File { file, .. } => &**file == output,
})
.then_some(())
.ok_or_else(|| AppError::DepFileMissingOutputs {
deps_file_path: deps_file.into(),
rule_outputs: rule.output.iter().map(OutItem::to_string).collect(),
dep_output: output.to_owned(),
.ok_or_else(|| {
app_error!(
"Dependencies file {deps_file:?} is missing any output of {:?}, found {output:?}",
rule.output.iter().map(OutItem::to_string).collect::<Vec<_>>()
)
}),
};
@ -676,14 +688,10 @@ impl Builder {
match oks.is_empty() {
true => {
// If we had no matching outputs, try to return all errors
errs.into_iter()
.map(|(_, err)| Err(err))
.collect::<ResultMultiple<()>>()?;
errs.into_iter().map(|(_, err)| Err(err)).collect::<AllErrs<(), _>>()?;
// If no errors existed, return an error for that
return Err(AppError::DepFileEmpty {
deps_file_path: deps_file.into(),
});
zutil_app_error::bail!("Dependencies file {deps_file:?} had no dependencies");
},
// Otherwise, just log and remove all errors
@ -691,7 +699,7 @@ impl Builder {
for (output, err) in errs {
let _: Vec<_> = deps.remove(&output).expect("Dependency should exist");
tracing::warn!(target=%parent_target, ?rule.name, err=%err.pretty(), "Ignoring unknown output in dependency file");
tracing::warn!(target=%parent_target, ?rule.name, err=%error::pretty(&err), "Ignoring unknown output in dependency file");
},
}
@ -711,7 +719,7 @@ impl Builder {
let (res, dep_guard) = self
.build(&dep_target, ignore_missing, reason.with_target(parent_target.clone()))
.await
.map_err(AppError::build_target(&dep_target))?;
.with_context(|| format!("Unable to build target {dep_target}"))?;
self.send_event(|| Event::TargetDepBuilt {
target: parent_target.clone(),
@ -723,16 +731,14 @@ impl Builder {
}
})
.collect::<FuturesUnordered<_>>()
.collect::<Vec<_>>()
.await
.into_iter()
.collect::<ResultMultiple<_>>()?;
.collect::<AllErrs<_, _>>()
.await?;
Ok(deps_res)
}
/// Rebuilds a rule
pub async fn rebuild_rule(&self, rule: &Rule<ArcStr>) -> Result<(), AppError> {
pub async fn rebuild_rule(&self, rule: &Rule<ArcStr>, target_rule: &TargetRule) -> Result<(), AppError> {
// Lock the semaphore
// Note: If we locked it per-command, we could exit earlier
// when closed, but that would break some executions.
@ -742,11 +748,12 @@ impl Builder {
// be in a "bad state", and since all the modification dates
// match it wouldn't be rebuilt.
let Ok(_permit) = self.exec_semaphore.acquire().await else {
do yeet AppError::ExecSemaphoreClosed {};
do yeet AppError::msg_with_data("Execution semaphore was closed", AppErrorData { should_ignore: true });
};
let mut stdout_aliases = HashMap::new();
for cmd in &rule.exec.cmds {
self.exec_cmd(rule, cmd).await?;
self.exec_cmd(rule, cmd, target_rule, &mut stdout_aliases).await?;
}
Ok(())
@ -754,11 +761,28 @@ impl Builder {
/// Executes `cmd`.
#[expect(unused_results, reason = "Due to the builder pattern of `Command`")]
async fn exec_cmd(&self, rule: &Rule<ArcStr>, cmd: &Command<ArcStr>) -> Result<(), AppError> {
async fn exec_cmd(
&self,
rule: &Rule<ArcStr>,
cmd: &Command<Expr>,
target_rule: &TargetRule,
stdout_aliases: &mut HashMap<ArcStr, Expr>,
) -> Result<(), AppError> {
let expand_visitor = expand::Visitor::new(
[stdout_aliases, &rule.aliases, &self.rules.aliases],
[&rule.pats, &self.rules.pats],
[target_rule.pats.clone()],
);
let cmd = self
.expander
.expand_cmd::<ArcStr>(cmd, &expand_visitor)
.with_context(|| format!("Unable to expand rule command {:?}", rule.name))?;
// Get the program name
let (program, args) = cmd.args.split_first().ok_or_else(|| AppError::RuleExecEmpty {
rule_name: rule.name.to_string(),
})?;
let (program, args) = cmd
.args
.split_first()
.ok_or_else(|| app_error!("Rule {:?} executable was empty", rule.name))?;
// Create the command and feed in all the arguments
let mut os_cmd = process::Command::new(&**program);
@ -769,21 +793,55 @@ impl Builder {
os_cmd.current_dir(&**cwd);
}
// Capture stdout, if we should
if cmd.stdout.is_some() {
os_cmd.stdout(Stdio::piped());
}
// Then spawn it and measure
tracing::debug!(target: "zbuild_exec", "{program} {}",
args.iter().join(" ")
);
let (duration, ()) = util::try_measure_async(async {
os_cmd
.status()
let (duration, output) = util::try_measure_async::<_, _, AppError>(async {
// Spawn
let mut output = os_cmd
.spawn()
.with_context(|| format!("Unable to spawn {}", self::cmd_to_string(&cmd)))?;
// Then wait for it to finish
output
.wait()
.await
.map_err(AppError::spawn_command(cmd))?
.with_context(|| format!("Command failed to start {}", self::cmd_to_string(&cmd)))?
.exit_ok()
.map_err(AppError::command_failed(cmd))
.with_context(|| format!("Command failed to run {}", self::cmd_to_string(&cmd)))?;
Ok(output)
})
.await?;
tracing::trace!(target: "zbuild_exec", rule_name=?rule.name, ?program, ?args, ?duration, "Execution duration");
if let Some(stdout_alias) = &cmd.stdout {
// Read the stdout
let mut stdout = String::new();
output
.stdout
.expect("Stdout was not set")
.read_to_string(&mut stdout)
.await
.with_context(|| format!("Unable to read command stdout {}", self::cmd_to_string(&cmd)))?;
// Trim the last newline, if any
// TODO: This feels like the right behavior, since most tools output a newline at the end, but
// should we have a configuration to avoid it?
if stdout.ends_with('\n') {
stdout.pop();
}
// Then save it under the alias.
stdout_aliases.insert(stdout_alias.clone(), Expr::string(stdout));
}
Ok(())
}
}
@ -791,7 +849,9 @@ impl Builder {
/// Parses a dependencies file
async fn parse_deps_file(file: &str) -> Result<HashMap<ArcStr, Vec<ArcStr>>, AppError> {
// Read it
let mut contents = fs::read_to_string(file).await.map_err(AppError::read_file(file))?;
let mut contents = fs::read_to_string(file)
.await
.with_context(|| format!("Unable to read file {file:?}"))?;
// Replace all backslashes at the end of a line with spaces
// Note: Although it'd be fine to replace it with a single space, by replacing it
@ -799,6 +859,7 @@ async fn parse_deps_file(file: &str) -> Result<HashMap<ArcStr, Vec<ArcStr>>, App
util::string_replace_in_place_with(&mut contents, "\\\n", " ");
// Now go through all non-empty lines and parse them
let contents = ArcStr::from(contents);
let deps = contents
.lines()
.filter_map(|line| {
@ -807,15 +868,18 @@ async fn parse_deps_file(file: &str) -> Result<HashMap<ArcStr, Vec<ArcStr>>, App
})
.map(|line| {
// Parse it
let (output, deps) = line.split_once(':').ok_or_else(|| AppError::DepFileMissingColon {
deps_file_path: file.into(),
})?;
let output = ArcStr::from(output.trim());
let deps = deps.split_whitespace().map(ArcStr::from).collect();
let (output, deps) = line
.split_once(':')
.ok_or_else(|| app_error!("Dependencies file {file:?} was missing a `:`"))?;
let output = contents.slice_from_str(output.trim());
let deps = deps
.split_whitespace()
.map(|dep| contents.slice_from_str(dep))
.collect();
Ok((output, deps))
})
.collect::<ResultMultiple<_>>()?;
.collect::<AllErrs<_, _>>()?;
Ok(deps)
}
@ -836,17 +900,23 @@ async fn rule_last_build_time(rule: &Rule<ArcStr>) -> Result<Option<SystemTime>,
};
let metadata = fs::symlink_metadata(&**file)
.await
.map_err(AppError::read_file_metadata(&**file))?;
let modified_time = metadata.modified().map_err(AppError::get_file_modified_time(&**file))?;
.with_context(|| format!("Unable to read file metadata (not following symlinks) of {file:?}"))?;
let modified_time = metadata
.modified()
.with_context(|| format!("Unable to get file modified time of {file:?}"))?;
Ok(modified_time)
})
.collect::<FuturesUnordered<_>>()
.collect::<Vec<_>>()
.await
.into_iter()
.collect::<ResultMultiple<Vec<_>>>()?
.collect::<AllErrs<Vec<_>, _>>()
.await?
.into_iter()
.min();
Ok(built_time)
}
/// Helper function to format a `Command` for errors
fn cmd_to_string<T: fmt::Display>(cmd: &Command<T>) -> String {
let inner = cmd.args.iter().map(|arg| format!("\"{arg}\"")).join(" ");
format!("[{inner}]")
}

View File

@ -6,8 +6,8 @@ use {
assert_matches::assert_matches,
mem,
sync::{
atomic::{self, AtomicBool},
Arc,
atomic::{self, AtomicBool},
},
time::SystemTime,
},

View File

@ -2,8 +2,10 @@
// Imports
use {
crate::{rules::Target, util::ArcStr, AppError},
crate::{AppError, rules::Target, util::ArcStr},
itertools::Itertools,
std::{ops::Try, sync::Arc},
zutil_app_error::app_error,
};
/// Inner type for [`BuildReason`].
@ -39,7 +41,7 @@ struct Inner {
/// depends on `B` and `C`, while `C` depends on `D`, the following tree
/// will be created:
///
/// ```
/// ```no_compile
/// ┌─┐ ┌─┐
/// │A│ <- │B│
/// └─┘ └─┘
@ -131,10 +133,10 @@ impl BuildReason {
/// otherwise returns `Ok`.
pub fn check_recursively(&self, target: &Target<ArcStr>) -> Result<(), AppError> {
self.for_each(|parent_target| match target == parent_target {
true => Err(AppError::FoundRecursiveRule {
target: target.to_string(),
parent_targets: self.collect_all().iter().map(Target::to_string).collect(),
}),
true => Err(app_error!(
"Found recursive rule: {target} (Parent rules: {})",
self.collect_all().iter().map(Target::to_string).join(", ")
)),
false => Ok(()),
})
}

View File

@ -1,668 +1,31 @@
//! Errors
// Imports
use {
crate::rules::{AliasOp, Command, Expr, Target},
itertools::{Itertools, Position as ItertoolsPos},
std::{
convert::Infallible,
env,
error::Error as StdError,
fmt,
io,
ops::{ControlFlow, FromResidual, Try},
path::PathBuf,
process::{self, ExitStatusError, Termination},
string::FromUtf8Error,
vec,
},
use std::{
convert::Infallible,
ops::{FromResidual, Yeet},
process::{self, Termination},
};
/// Generates the error enum
macro_rules! decl_error {
(
$(#[$meta:meta])*
$Name:ident;
$Multiple:ident($MultipleTy:ty);
$Other:ident($OtherTy:ty);
/// App error
pub type AppError = zutil_app_error::AppError<AppErrorData>;
$(
$( #[doc = $variant_doc:expr] )*
$(
#[from_fn(
// Function definition
$(#[$variant_fn_meta:meta])*
fn $variant_fn:ident
// Generics
$( <
$( $VariantLifetimes:lifetime, )*
$( $VariantGenerics:ident $(: $VariantBound:path )? ),* $(,)?
> )?
// Error
(
$variant_fn_err:ident: $VariantFnErr:ty $( => $variant_fn_err_expr:expr )?
)
// Args
(
$(
$variant_fn_arg:ident: $VariantFnArg:ty $( => $variant_fn_arg_expr:expr )?
),*
$(,)?
)
// Return type lifetimes
$(
+ $VariantFnLifetime:lifetime
)?
)]
)?
#[source($variant_source:expr)]
#[fmt($($variant_fmt:tt)*)]
$Variant:ident {
$(
$( #[$variant_field_meta:meta] )*
$variant_field:ident: $VariantField:ty
),*
$(,)?
},
)*
) => {
$( #[ $meta ] )*
#[derive(Debug)]
#[non_exhaustive]
pub enum $Name {
/// Multiple
$Multiple($MultipleTy),
/// Other
// TODO: Removes usages of this, it's for quick prototyping
$Other($OtherTy),
$(
$( #[doc = $variant_doc] )*
$Variant {
$(
$( #[$variant_field_meta] )*
$variant_field: $VariantField,
)*
},
)*
}
impl $Name {
$(
$(
#[doc = concat!("Returns a function to create a [`Self::", stringify!($Variant) ,"`] error from it's inner error.")]
$( #[$variant_fn_meta] )*
pub fn $variant_fn
// Generics
$( <
$( $VariantLifetimes, )*
$( $VariantGenerics $(: $VariantBound )?, )*
> )?
// Arguments
( $(
$variant_fn_arg: $VariantFnArg,
)* )
// Return type
-> impl FnOnce($VariantFnErr) -> Self $( + $VariantFnLifetime )?
{
move |$variant_fn_err| Self::$Variant {
$variant_fn_err $(: $variant_fn_err_expr )?,
$(
$variant_fn_arg $(: $variant_fn_arg_expr )?,
)*
}
}
)?
)*
/// Returns an object that can be used for a pretty display of this error
pub fn pretty(&self) -> PrettyDisplay<'_> {
PrettyDisplay::new(self)
}
}
impl StdError for AppError {
fn source(&self) -> Option<&(dyn StdError + 'static)> {
match self {
// Note: We don't return any of the errors here, so that we can format
// it properly without duplicating errors.
Self::$Multiple(_) => None,
Self::$Other(source) => AsRef::<dyn StdError>::as_ref(source).source(),
$(
#[expect(clippy::allow_attributes, reason = "Auto-generated code")]
#[allow(unused_variables, reason = "Auto-generated code")]
Self::$Variant { $( $variant_field ),* } => $variant_source,
)*
}
}
}
impl fmt::Display for AppError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Display the main message
match self {
Self::$Multiple(errs) => write!(f, "Multiple errors ({})", errs.len()),
Self::$Other(source) => source.fmt(f),
$(
#[expect(clippy::allow_attributes, reason = "Auto-generated code")]
#[allow(unused_variables, reason = "Auto-generated code")]
Self::$Variant { $( $variant_field ),* } => write!(f, $($variant_fmt)*),
)*
}
}
}
}
/// App error data
#[derive(Clone, Copy, Debug)]
pub struct AppErrorData {
/// Whether this error should be ignored when printing
pub should_ignore: bool,
}
decl_error! {
	/// Test
	AppError;

	Multiple(Vec<Self>);
	Other(anyhow::Error);

	/// Get current directory
	#[from_fn( fn get_current_dir(source: io::Error)() )]
	#[source(Some(source))]
	#[fmt("Unable to get current directory")]
	GetCurrentDir {
		/// Underlying error
		source: io::Error
	},

	/// Set current directory
	#[from_fn(
		fn set_current_dir<P: Into<PathBuf>>(source: io::Error)(
			dir: P => dir.into()
		)
	)]
	#[source(Some(source))]
	#[fmt("Unable to set current directory to {dir:?}")]
	SetCurrentDir {
		/// Underlying error
		source: io::Error,
		/// Path we tried to set as current directory
		dir: PathBuf
	},

	/// Read file
	#[from_fn(
		fn read_file<P: Into<PathBuf>>(source: io::Error)(
			file_path: P => file_path.into()
		)
	)]
	#[source(Some(source))]
	#[fmt("Unable to read file {file_path:?}")]
	ReadFile {
		/// Underlying error
		source: io::Error,
		/// File we failed to read
		file_path: PathBuf,
	},

	/// Read file metadata
	#[from_fn(
		fn read_file_metadata<P: Into<PathBuf>>(source: io::Error)(
			file_path: P => file_path.into()
		)
	)]
	#[source(Some(source))]
	#[fmt("Unable to read file metadata (not following symlinks) {file_path:?}")]
	ReadFileMetadata {
		/// Underlying error
		source: io::Error,
		/// File we failed to read metadata of
		file_path: PathBuf,
	},

	/// Get file modified time
	#[from_fn(
		fn get_file_modified_time<P: Into<PathBuf>>(source: io::Error)(
			file_path: P => file_path.into()
		)
	)]
	#[source(Some(source))]
	#[fmt("Unable to get file modified time {file_path:?}")]
	GetFileModifiedTime {
		/// Underlying error
		source: io::Error,
		/// File we failed to get the modified time of
		file_path: PathBuf,
	},

	/// Check if file exists
	#[from_fn(
		fn check_file_exists<P: Into<PathBuf>>(source: io::Error)(
			file_path: P => file_path.into()
		)
	)]
	#[source(Some(source))]
	#[fmt("Unable to check if file exists {file_path:?}")]
	CheckFileExists {
		/// Underlying error
		source: io::Error,
		/// File we failed to check
		file_path: PathBuf,
	},

	/// Missing file
	#[from_fn(
		// TODO: For some reason, rustc thinks the following lint is
		//       unfulfilled, check why.
		//#[expect(dead_code, reason = "Not used yet")]
		fn missing_file<P: Into<PathBuf>>(source: io::Error)(
			file_path: P => file_path.into()
		)
	)]
	#[source(Some(source))]
	#[fmt("Missing file {file_path:?} and no rule to build it found")]
	MissingFile {
		/// Underlying error
		source: io::Error,
		/// File that is missing
		file_path: PathBuf,
	},

	/// Parse yaml
	#[from_fn(
		fn parse_yaml<P: Into<PathBuf>>(source: serde_yaml::Error)(
			yaml_path: P => yaml_path.into()
		)
	)]
	#[source(Some(source))]
	#[fmt("Unable to parse yaml file {yaml_path:?}")]
	ParseYaml {
		/// Underlying error
		source: serde_yaml::Error,
		/// Yaml path
		yaml_path: PathBuf,
	},

	/// Spawn command
	#[from_fn(
		fn spawn_command<T: fmt::Display>(source: io::Error)(
			cmd: &Command<T> => self::cmd_to_string(cmd)
		) + '_
	)]
	#[source(Some(source))]
	#[fmt("Unable to spawn {cmd}")]
	SpawnCommand {
		/// Underlying error
		source: io::Error,
		/// Command formatted
		cmd: String,
	},

	/// Command failed
	#[from_fn(
		fn command_failed<T: fmt::Display>(source: ExitStatusError)(
			cmd: &Command<T> => self::cmd_to_string(cmd)
		) + '_
	)]
	#[source(Some(source))]
	#[fmt("Command failed {cmd}")]
	CommandFailed {
		/// Underlying error
		source: ExitStatusError,
		/// Command formatted
		cmd: String,
	},

	/// Command output was non-utf8
	#[from_fn(
		fn command_output_non_utf8<T: fmt::Display>(source: FromUtf8Error)(
			cmd: &Command<T> => self::cmd_to_string(cmd)
		) + '_
	)]
	#[source(Some(source))]
	#[fmt("Command output was non-utf8 {cmd}")]
	CommandOutputNonUtf8 {
		/// Underlying error
		source: FromUtf8Error,
		/// Command formatted
		cmd: String,
	},

	/// Get default jobs
	#[from_fn( fn get_default_jobs(source: io::Error)() )]
	#[source(Some(source))]
	#[fmt("Unable to query system for available parallelism for default number of jobs")]
	GetDefaultJobs {
		/// Underlying error
		source: io::Error
	},

	/// Zbuild not found
	#[source(None)]
	#[fmt("No `zbuild.yaml` file found in current or parent directories.\nYou can use `--path {{zbuild-path}}` in order to specify the manifest's path")]
	ZBuildNotFound {},

	/// Path had no parent
	#[source(None)]
	#[fmt("Path had no parent directory {path:?}")]
	PathParent {
		/// Path that had no parent
		path: PathBuf,
	},

	/// Build target
	#[from_fn(
		fn build_target<'target, T: fmt::Display>(source: Self => Some(Box::new(source)))(
			target: &'target Target<T> => target.to_string()
		) + 'target
	)]
	#[source(source.as_deref().map(|err: &AppError| <&dyn StdError>::from(err)))]
	#[fmt("Unable to build target {target}")]
	BuildTarget {
		/// Underlying error
		source: Option<Box<Self>>,
		/// Formatted target
		target: String,
	},

	/// Build rule
	#[from_fn(
		fn build_rule<S: Into<String>>(source: Self => Box::new(source))(
			rule_name: S => rule_name.into()
		)
	)]
	#[source(Some(&**source))]
	#[fmt("Unable to build rule {rule_name}")]
	BuildRule {
		/// Underlying error
		source: Box<Self>,
		/// Rule name
		rule_name: String,
	},

	/// Build dependencies file
	#[from_fn(
		fn build_deps_file<P: Into<PathBuf>>(source: Self => Box::new(source))(
			deps_file: P => deps_file.into()
		)
	)]
	#[source(Some(&**source))]
	#[fmt("Unable to build dependencies file {deps_file:?}")]
	BuildDepFile {
		/// Underlying error
		source: Box<Self>,
		/// Dependencies file
		deps_file: PathBuf,
	},

	/// Expand rule
	#[from_fn(
		fn expand_rule<T: Into<String>>(source: Self => Box::new(source))(
			rule_name: T => rule_name.into()
		)
	)]
	#[source(Some(&**source))]
	#[fmt("Unable to expand rule {rule_name}")]
	ExpandRule {
		/// Underlying error
		source: Box<Self>,
		/// Rule name
		rule_name: String,
	},

	/// Expand target
	#[from_fn(
		fn expand_target<'target, T: fmt::Display>(source: Self => Box::new(source))(
			target: &'target Target<T> => target.to_string()
		) + 'target
	)]
	#[source(Some(&**source))]
	#[fmt("Unable to expand target {target}")]
	ExpandTarget {
		/// Underlying error
		source: Box<Self>,
		/// Formatted target
		target: String,
	},

	/// Expand expression
	#[from_fn(
		fn expand_expr<'expr,>(source: Self => Box::new(source))(
			expr: &'expr Expr => expr.to_string()
		) + 'expr
	)]
	#[source(Some(&**source))]
	#[fmt("Unable to expand expression {expr}")]
	ExpandExpr {
		/// Underlying error
		source: Box<Self>,
		/// Formatted expression
		expr: String,
	},

	/// Unknown rule
	#[source(None)]
	#[fmt("Unknown rule {rule_name}")]
	UnknownRule {
		/// Rule name
		rule_name: String,
	},

	/// Unknown alias
	#[source(None)]
	#[fmt("Unknown alias {alias_name}")]
	UnknownAlias {
		/// Alias name
		alias_name: String,
	},

	/// Unknown pattern
	#[source(None)]
	#[fmt("Unknown pattern {pattern_name}")]
	UnknownPattern {
		/// Pattern name
		pattern_name: String,
	},

	/// Unresolved alias or patterns
	#[source(None)]
	#[fmt("Expression had unresolved alias or patterns: {expr} ({expr_cmpts:?})")]
	UnresolvedAliasOrPats {
		/// Formatted expression
		expr: String,
		/// Components
		expr_cmpts: Vec<String>,
	},

	/// Match expression had 2 or more patterns
	#[source(None)]
	#[fmt("Match expression had 2 or more patterns: {expr} ({expr_cmpts:?})")]
	MatchExprTooManyPats {
		/// Formatted expression
		expr: String,
		/// Components
		expr_cmpts: Vec<String>,
	},

	/// Alias operation
	#[from_fn( fn alias_op(source: Self => Box::new(source))(op: AliasOp) )]
	#[source(Some(&**source))]
	#[fmt("Unable to apply alias operation `{op}`")]
	AliasOp {
		/// Underlying error
		source: Box<Self>,
		/// Operation
		op: AliasOp,
	},

	/// Dependencies file missing `:`
	#[source(None)]
	#[fmt("Dependencies file {deps_file_path:?} was missing a `:`")]
	DepFileMissingColon {
		/// Dep file path
		deps_file_path: PathBuf,
	},

	/// Dependencies file missing rule name
	#[source(None)]
	#[fmt("Dependencies file {deps_file_path:?} is missing the rule name {rule_name:?}, found {dep_output:?}")]
	DepFileMissingRuleName {
		/// Dep file path
		deps_file_path: PathBuf,
		/// Rule name
		rule_name: String,
		/// Dependencies file output
		dep_output: String,
	},

	/// Dependencies file missing outputs
	#[source(None)]
	#[fmt("Dependencies file {deps_file_path:?} is missing any output of {rule_outputs:?}, found {dep_output:?}")]
	DepFileMissingOutputs {
		/// Dep file path
		deps_file_path: PathBuf,
		/// Rule outputs
		rule_outputs: Vec<String>,
		/// Dependencies file output
		dep_output: String,
	},

	/// Dependencies file empty
	#[source(None)]
	#[fmt("Dependencies file {deps_file_path:?} had no dependencies")]
	DepFileEmpty {
		/// Dep file path
		deps_file_path: PathBuf,
	},

	/// Rule executable was empty
	#[source(None)]
	#[fmt("Rule {rule_name} executable was empty")]
	RuleExecEmpty {
		/// Rule name
		rule_name: String,
	},

	/// Exit due to failed builds
	#[source(None)]
	#[fmt("Exiting with non-0 due to failed builds")]
	ExitDueToFailedBuilds {},

	/// Execution semaphore was closed
	#[source(None)]
	#[fmt("Execution semaphore was closed")]
	ExecSemaphoreClosed {},

	/// Found recursive rule
	#[source(None)]
	#[fmt("Found recursive rule: {target} (Parent rules: {})", parent_targets.iter().join(", "))]
	FoundRecursiveRule {
		/// Formatted recursive target
		target: String,
		/// Formatted parent targets
		parent_targets: Vec<String>,
	},
}
/// Helper function to format a `Command` for errors
///
/// Quotes each argument and separates them with spaces, e.g. `["a" "b"]`.
fn cmd_to_string<T: fmt::Display>(cmd: &Command<T>) -> String {
	let mut output = String::from("[");
	for (arg_idx, arg) in cmd.args.iter().enumerate() {
		// Space-separate all arguments after the first
		if arg_idx != 0 {
			output.push(' ');
		}
		output.push('"');
		output.push_str(&arg.to_string());
		output.push('"');
	}
	output.push(']');
	output
}
/// Helper type to collect a `IntoIter<Item = Result<T, AppError>>`
/// into a `Result<C, AppError::Multiple>`.
#[derive(Debug)]
pub enum ResultMultiple<C> {
	/// Every item was `Ok`; holds all collected values
	Ok(C),
	/// At least one item was `Err`; holds every error encountered
	Err(Vec<AppError>),
}
impl<C, T> FromIterator<Result<T, AppError>> for ResultMultiple<C>
where
C: Default + Extend<T>,
{
fn from_iter<I>(iter: I) -> Self
where
I: IntoIterator<Item = Result<T, AppError>>,
{
// TODO: If we get any errors, don't allocate memory for the rest of the values?
let (values, errs) = iter.into_iter().partition_result::<C, Vec<_>, _, _>();
match errs.is_empty() {
true => Self::Ok(values),
false => Self::Err(errs),
}
}
}
/// Residual of [`ResultMultiple`], carrying the accumulated errors.
#[derive(Debug)]
pub struct ResultMultipleResidue(Vec<AppError>);
impl<C> Try for ResultMultiple<C> {
	/// Successful output: the collected values
	type Output = C;
	/// Residual carrying the accumulated errors
	type Residual = ResultMultipleResidue;

	fn from_output(output: Self::Output) -> Self {
		Self::Ok(output)
	}

	fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
		// `?` continues with the values on `Ok`, otherwise short-circuits
		// with all errors bundled in the residue.
		match self {
			Self::Ok(values) => ControlFlow::Continue(values),
			Self::Err(errs) => ControlFlow::Break(ResultMultipleResidue(errs)),
		}
	}
}
impl<T> FromResidual<ResultMultipleResidue> for ResultMultiple<T> {
	// Propagate the error list unchanged when `?` short-circuits.
	fn from_residual(residual: ResultMultipleResidue) -> Self {
		Self::Err(residual.0)
	}
}
impl<T> FromResidual<ResultMultipleResidue> for Result<T, AppError> {
fn from_residual(residual: ResultMultipleResidue) -> Self {
let err = match <[_; 1]>::try_from(residual.0) {
Ok([err]) => err,
Err(errs) => {
assert!(!errs.is_empty(), "`ResultMultipleResidue` should hold at least 1 error");
AppError::Multiple(errs)
},
};
Err(err)
#[expect(clippy::derivable_impls, reason = "We want to be explicit")]
impl Default for AppErrorData {
	fn default() -> Self {
		// By default, errors are not ignored when printing.
		Self { should_ignore: false }
	}
}
/// Exit result
#[derive(Debug)]
pub enum ExitResult {
Ok,
Err(AppError),
@ -673,7 +36,7 @@ impl Termination for ExitResult {
match self {
Self::Ok => process::ExitCode::SUCCESS,
Self::Err(err) => {
eprintln!("Error: {}", err.pretty());
eprintln!("Error: {}", self::pretty(&err));
process::ExitCode::FAILURE
},
}
@ -683,218 +46,19 @@ impl Termination for ExitResult {
impl FromResidual<Result<Infallible, AppError>> for ExitResult {
	fn from_residual(residual: Result<Infallible, AppError>) -> Self {
		match residual {
			// Note: The `Ok` arm is unreachable, since `Infallible` has no values.
			#[expect(
				unreachable_patterns,
				reason = "We can't remove it yet until `never_patterns` gets better support"
			)]
			Ok(never) => match never {},
			Err(err) => Self::Err(err),
		}
	}
}
/// Pretty display for [`AppError`]
#[derive(Debug)]
pub struct PrettyDisplay<'a> {
	/// Root error
	root: &'a AppError,

	/// Whether we should show irrelevant errors.
	// Note: Defaults from the `ZBUILD_SHOW_IRRELEVANT_ERRS` env var, see `PrettyDisplay::new`.
	show_irrelevant: bool,
}
/// A column of the left-hand gutter drawn while formatting nested errors.
#[derive(PartialEq, Clone, Copy, Debug)]
enum Column {
	/// Continuation of a sibling branch (pushed under `├─` entries)
	Line,
	/// Blank filler (pushed under `└─` entries)
	Empty,
}
impl Column {
/// Returns the string for this column
const fn as_str(self) -> &'static str {
match self {
Self::Line => "",
Self::Empty => " ",
}
impl FromResidual<Yeet<AppError>> for ExitResult {
	// Supports `do yeet err` in functions returning `ExitResult`.
	fn from_residual(Yeet(err): Yeet<AppError>) -> Self {
		Self::Err(err)
	}
}
impl<'a> PrettyDisplay<'a> {
	/// Creates a new pretty display
	pub(crate) fn new(root: &'a AppError) -> Self {
		// Get whether to show irrelevant errors from the environment
		// Note: Any of `1`, `y`, `yes`, `true` (case-insensitive) enables it.
		let var = env::var("ZBUILD_SHOW_IRRELEVANT_ERRS");
		let show_irrelevant = match var {
			Ok(mut s) => {
				s.make_ascii_lowercase();
				matches!(s.as_str(), "1" | "y" | "yes" | "true")
			},
			Err(_) => false,
		};
		Self { root, show_irrelevant }
	}

	/// Sets if we should show irrelevant errors during formatting
	pub fn with_show_irrelevant(&mut self, show_irrelevant: bool) -> &mut Self {
		self.show_irrelevant = show_irrelevant;
		self
	}

	/// Formats a single error
	// Note: Always prints, even if irrelevant. Only `fmt_multiple` actually filters
	// any of it's entries for being irrelevant.
	fn fmt_single(
		&self,
		f: &mut fmt::Formatter<'_>,
		err: &AppError,
		columns: &mut Vec<Column>,
		total_ignored_errs: &mut usize,
	) -> fmt::Result {
		// If it's multiple, display it as multiple
		if let AppError::Multiple(errs) = err {
			return self.fmt_multiple(f, errs, columns, total_ignored_errs);
		}

		// Else write the top-level error
		write!(f, "{err}")?;

		// Then, if there's a cause, write the rest
		if let Some(mut cur_source) = err.source() {
			let starting_columns = columns.len();
			loop {
				// Print the pre-amble
				f.pad("\n")?;
				for c in &*columns {
					f.pad(c.as_str())?;
				}
				f.pad("└─")?;
				columns.push(Column::Empty);

				// Then check if we got to a multiple.
				match cur_source.downcast_ref::<AppError>() {
					Some(AppError::Multiple(errs)) => {
						self.fmt_multiple(f, errs, columns, total_ignored_errs)?;
						break;
					},
					_ => write!(f, "{cur_source}",)?,
				}

				// And descend
				cur_source = match cur_source.source() {
					Some(source) => source,
					_ => break,
				};
			}

			// Discard all columns pushed while walking this source chain.
			let _: vec::Drain<'_, _> = columns.drain(starting_columns..);
		}

		Ok(())
	}

	/// Formats multiple errors
	fn fmt_multiple(
		&self,
		f: &mut fmt::Formatter<'_>,
		errs: &[AppError],
		columns: &mut Vec<Column>,
		total_ignored_errs: &mut usize,
	) -> fmt::Result {
		// Write the top-level error
		write!(f, "Multiple errors:")?;

		// For each error, write it
		let mut ignored_errs = 0;
		for (pos, err) in errs.iter().with_position() {
			// If this error is irrelevant, continue
			if !self.show_irrelevant && !self::err_contains_relevant(err) {
				ignored_errs += 1;
				continue;
			}

			f.pad("\n")?;
			for c in &*columns {
				f.pad(c.as_str())?;
			}

			// Note: We'll only print `└─` if we have no ignored errors, since if we do,
			// we need that to print the final line showcasing how many we ignored
			match ignored_errs == 0 && matches!(pos, ItertoolsPos::Last | ItertoolsPos::Only) {
				true => {
					f.pad("└─")?;
					columns.push(Column::Empty);
				},
				false => {
					f.pad("├─")?;
					columns.push(Column::Line);
				},
			}

			// Recurse into the entry, then drop the column we pushed for it.
			self.fmt_single(f, err, columns, total_ignored_errs)?;
			let _: Option<_> = columns.pop();
		}

		// Summarize how many entries were hidden, if any.
		if ignored_errs != 0 {
			*total_ignored_errs += ignored_errs;
			f.pad("\n")?;
			for c in &*columns {
				f.pad(c.as_str())?;
			}
			f.pad("└─")?;
			write!(f, "({ignored_errs} irrelevant errors)")?;
		}

		Ok(())
	}
}
impl fmt::Display for PrettyDisplay<'_> {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		// Format the whole error tree, tracking the gutter columns and how
		// many irrelevant errors were hidden along the way.
		let mut columns = Vec::new();
		let mut total_ignored_errs = 0;
		self.fmt_single(f, self.root, &mut columns, &mut total_ignored_errs)?;

		// Every column pushed during formatting must have been popped by now.
		assert_eq!(columns.len(), 0, "There should be no columns after formatting");

		// Tell the user how to reveal the hidden errors, if there were any.
		if total_ignored_errs > 0 {
			f.pad("\n")?;
			write!(
				f,
				"Note: {total_ignored_errs} irrelevant errors were hidden, set `ZBUILD_SHOW_IRRELEVANT_ERRS=1` to \
				 show them"
			)?;
		}

		Ok(())
	}
}
/// Returns if this error contains any "relevant" errors.
///
/// In our case, the following cases are considered *irrelevant*:
/// - Is an [`AppError::BuildTarget`] with no source.
/// - Is an [`AppError::ExecSemaphoreClosed`].
fn err_contains_relevant(err: &AppError) -> bool {
// If we're multiple errors, return if any of them are relevant
if let AppError::Multiple(errs) = err {
return errs.iter().any(self::err_contains_relevant);
}
// Else if the error itself is irrelevant, return
if matches!(
err,
AppError::BuildTarget { source: None, .. } | AppError::ExecSemaphoreClosed {}
) {
return false;
}
// Else check the inner error, if any
if let Some(source) = err.source() &&
let Some(err) = source.downcast_ref::<AppError>()
{
return self::err_contains_relevant(err);
}
// Else, we're relevant
true
/// Function to setup pretty printing
pub fn pretty(err: &AppError) -> zutil_app_error::PrettyDisplay<'_, AppErrorData> {
	// Hide any error whose associated data was flagged with `should_ignore`.
	err.pretty().with_ignore_err(|_, data| data.should_ignore)
}

View File

@ -3,31 +3,28 @@
// Imports
use {
crate::{
error::{AppError, ResultMultiple},
rules::{AliasOp, Command, DepItem, Exec, Expr, ExprCmpt, OutItem, Rule, Target},
AppError,
rules::{Command, DepItem, Expr, ExprCmpt, ExprOp, OutItem, Pattern, Rule, Target},
util::ArcStr,
},
indexmap::IndexMap,
smallvec::SmallVec,
std::{collections::BTreeMap, marker::PhantomData, mem, path::PathBuf, sync::Arc},
std::{collections::HashMap, mem, path::PathBuf},
zutil_app_error::{AllErrs, Context, app_error},
};
/// Expander
#[derive(Debug)]
pub struct Expander {
/// Phantom for `'s`
_phantom: PhantomData<&'static ()>,
}
pub struct Expander {}
#[expect(clippy::unused_self, reason = "Currently expander doesn't do anything")]
impl Expander {
/// Creates a new expander
pub const fn new() -> Self {
Self { _phantom: PhantomData }
Self {}
}
/// Expands an expression to it's components
pub fn expand_expr<T>(&self, expr: &Expr, visitor: &Visitor) -> Result<T, AppError>
pub fn expand_expr<T>(&self, expr: &Expr, visitor: &Visitor<'_>) -> Result<T, AppError>
where
T: TryFromExpr,
{
@ -41,25 +38,11 @@ impl Expander {
ExprCmpt::String(s) => expr.push_str(s),
// If it's a pattern, we visit it
// Note: We don't care about the operations on patterns, those are for matching
ExprCmpt::Pattern(pat) => match visitor.visit_pat(&pat.name) {
// If expanded, just replace it with a string
FlowControl::ExpandTo(value) => expr.push_str(value),
// Else keep on Keep and error on Error
FlowControl::Keep => expr.push(cmpt),
FlowControl::Error =>
return Err(AppError::UnknownPattern {
pattern_name: pat.name.to_string(),
}),
},
// If it's an alias, we visit and then expand it
ExprCmpt::Alias(alias) => match visitor.visit_alias(&alias.name) {
ExprCmpt::Ident { name, ops } => match visitor.visit_ident(name) {
// If expanded, check if we need to apply any operations
FlowControl::ExpandTo(alias_expr) => match alias.ops.is_empty() {
FlowControl::ExpandTo(expand_expr) => match ops.is_empty() {
// If not, just recursively expand it
true => expr.extend(self.expand_expr::<Expr>(alias_expr, visitor)?.cmpts),
true => expr.extend(self.expand_expr::<Expr>(&expand_expr, visitor)?.cmpts),
// Else expand it to a string, then apply all operations
// Note: We expand to string even if we don't *need* to to ensure the user doesn't
@ -67,15 +50,15 @@ impl Expander {
// we can't resolve the operations.
false => {
// Expand
let value = self.expand_expr::<ArcStr>(alias_expr, visitor)?;
let value = self.expand_expr::<ArcStr>(&expand_expr, visitor)?;
// Then apply all
let value = alias.ops.iter().try_fold(value, |mut value, &op| {
let value = ops.iter().try_fold(value, |mut value, &op| {
value
.with_mut(|s| self.expand_alias_op(op, s))
.map_err(AppError::alias_op(op))?;
.with_mut(|s| self.expand_expr_op(op, s))
.with_context(|| format!("Unable to apply expression operator `{op}`"))?;
Ok(value)
Ok::<_, AppError>(value)
})?;
expr.push_str(&value);
@ -84,29 +67,24 @@ impl Expander {
// Else keep on Keep and error on Error
FlowControl::Keep => expr.push(cmpt),
FlowControl::Error =>
return Err(AppError::UnknownAlias {
alias_name: alias.name.to_string(),
}),
FlowControl::Error => zutil_app_error::bail!("Unknown expression {name:?}"),
},
};
}
Ok(expr)
Ok::<_, AppError>(expr)
})?;
// Then try to parse from the expression
T::try_from_expr(expr)
}
/// Expands an alias operation on the value of that alias
fn expand_alias_op(&self, op: AliasOp, value: &mut String) -> Result<(), AppError> {
/// Expands an expression operation on the value of that expression
fn expand_expr_op(&self, op: ExprOp, value: &mut String) -> Result<(), AppError> {
match op {
AliasOp::DirName => {
ExprOp::DirName => {
// Get the path and try to pop the last segment
let mut path = PathBuf::from(mem::take(value));
if !path.pop() {
return Err(AppError::PathParent { path });
}
zutil_app_error::ensure!(path.pop(), "Path had no parent directory {path:?}");
// Then convert it back to a string
// Note: This should technically never fail, since the path was originally
@ -116,22 +94,16 @@ impl Expander {
.into_string()
.expect("utf-8 path was no longer utf-8 after getting dir-name");
},
};
}
Ok(())
}
/// Expands a rule of all it's aliases and patterns
pub fn expand_rule<T>(&self, rule: &Rule<Expr>, visitor: &Visitor) -> Result<Rule<T>, AppError>
pub fn expand_rule<T>(&self, rule: &Rule<Expr>, visitor: &Visitor<'_>) -> Result<Rule<T>, AppError>
where
T: TryFromExpr + Ord,
{
let aliases = rule
.aliases
.iter()
.map(|(name, expr)| Ok((name.clone(), self.expand_expr(expr, visitor)?)))
.collect::<ResultMultiple<_>>()?;
let output = rule
.output
.iter()
@ -141,7 +113,7 @@ impl Expander {
is_deps_file,
}),
})
.collect::<ResultMultiple<_>>()?;
.collect::<AllErrs<_, _>>()?;
let deps = rule
.deps
@ -152,66 +124,52 @@ impl Expander {
is_optional,
is_static,
is_deps_file,
} => Ok::<_, AppError>(DepItem::File {
} => Ok(DepItem::File {
file: self.expand_expr(file, visitor)?,
is_optional,
is_static,
is_deps_file,
}),
DepItem::Rule { ref name, ref pats } => {
let pats = pats
.iter()
.map(|(pat, expr)| Ok((self.expand_expr(pat, visitor)?, self.expand_expr(expr, visitor)?)))
.collect::<ResultMultiple<_>>()?;
Ok::<_, AppError>(DepItem::Rule {
name: self.expand_expr(name, visitor)?,
pats: Arc::new(pats),
})
},
DepItem::Rule { ref name } => Ok(DepItem::Rule { name: name.clone() }),
})
.collect::<ResultMultiple<_>>()?;
let exec = Exec {
cmds: rule
.exec
.cmds
.iter()
.map(|cmd| self.expand_cmd(cmd, visitor))
.collect::<ResultMultiple<_>>()?,
};
.collect::<AllErrs<_, _>>()?;
Ok(Rule {
name: rule.name.clone(),
aliases: Arc::new(aliases),
aliases: rule.aliases.clone(),
pats: rule.pats.clone(),
output,
deps,
exec,
exec: rule.exec.clone(),
})
}
/// Expands a command
pub fn expand_cmd<T>(&self, cmd: &Command<Expr>, visitor: &Visitor) -> Result<Command<T>, AppError>
pub fn expand_cmd<T>(&self, cmd: &Command<Expr>, visitor: &Visitor<'_>) -> Result<Command<T>, AppError>
where
T: TryFromExpr,
{
Ok(Command {
cwd: cmd.cwd.as_ref().map(|cwd| self.expand_expr(cwd, visitor)).transpose()?,
args: cmd
cwd: cmd.cwd.as_ref().map(|cwd| self.expand_expr(cwd, visitor)).transpose()?,
stdout: cmd.stdout.clone(),
args: cmd
.args
.iter()
.map(|arg| self.expand_expr(arg, visitor))
.collect::<ResultMultiple<_>>()?,
.collect::<AllErrs<_, _>>()?,
})
}
/// Expands a target expression
pub fn expand_target<T>(&self, target: &Target<Expr>, visitor: &Visitor) -> Result<Target<T>, AppError>
pub fn expand_target<T>(&self, target: &Target<Expr>, visitor: &Visitor<'_>) -> Result<Target<T>, AppError>
where
T: TryFromExpr,
{
let target = match *target {
Target::File { ref file, is_static } => Target::File {
file: self.expand_expr(file, visitor).map_err(AppError::expand_expr(file))?,
file: self
.expand_expr(file, visitor)
.with_context(|| format!("Unable to expand expression {file}"))?,
is_static,
},
@ -221,13 +179,16 @@ impl Expander {
.map(|(pat, expr)| {
Ok((
pat.clone(),
self.expand_expr(expr, visitor).map_err(AppError::expand_expr(expr))?,
self.expand_expr(expr, visitor)
.with_context(|| format!("Unable to expand expression {expr}"))?,
))
})
.collect::<ResultMultiple<_>>()?;
.collect::<AllErrs<_, _>>()?;
Target::Rule {
rule: self.expand_expr(rule, visitor).map_err(AppError::expand_expr(rule))?,
pats: Arc::new(pats),
rule: self
.expand_expr(rule, visitor)
.with_context(|| format!("Unable to expand expression {rule}"))?,
pats,
}
},
};
@ -251,7 +212,7 @@ pub enum FlowControl<T> {
impl<T> FlowControl<T> {
/// Converts a `&FlowControl<T>` to `FlowControl<&T>`
pub const fn as_ref(&self) -> FlowControl<&T> {
pub const fn _as_ref(&self) -> FlowControl<&T> {
match self {
Self::ExpandTo(value) => FlowControl::ExpandTo(value),
Self::Keep => FlowControl::Keep,
@ -273,76 +234,63 @@ impl TryFromExpr for Expr {
impl TryFromExpr for ArcStr {
fn try_from_expr(expr: Expr) -> Result<Self, AppError> {
expr.try_into_string().map_err(|expr| AppError::UnresolvedAliasOrPats {
expr: expr.to_string(),
expr_cmpts: expr.cmpts.into_iter().map(|cmpt| cmpt.to_string()).collect(),
expr.try_into_string().map_err(|expr| {
app_error!(
"Expression had unresolved aliases or patterns: {expr} ({:?})",
expr.cmpts.iter().map(ExprCmpt::to_string).collect::<Vec<_>>()
)
})
}
}
/// Visitor for [`Expander`]
#[derive(Clone, Debug)]
pub struct Visitor {
pub struct Visitor<'a> {
/// All aliases, in order to check
aliases: SmallVec<[Arc<IndexMap<ArcStr, Expr>>; 2]>,
aliases: SmallVec<[&'a HashMap<ArcStr, Expr>; 2]>,
/// All patterns, in order to check
pats: SmallVec<[Arc<BTreeMap<ArcStr, ArcStr>>; 1]>,
/// All unresolved patterns, in order to check
unresolved_pats: SmallVec<[&'a HashMap<ArcStr, Pattern>; 2]>,
/// Default alias action
default_alias: FlowControl<Expr>,
/// Default pattern action
default_pat: FlowControl<ArcStr>,
/// All resolved patterns
resolved_pats: SmallVec<[(ArcStr, ArcStr); 1]>,
}
impl Visitor {
impl<'a> Visitor<'a> {
/// Creates a new visitor with aliases and patterns
pub fn new<'a, A, P>(aliases: A, pats: P) -> Self
pub fn new<A, UP, RP>(aliases: A, unresolved_pats: UP, resolved_pats: RP) -> Self
where
A: IntoIterator<Item = &'a Arc<IndexMap<ArcStr, Expr>>>,
P: IntoIterator<Item = &'a Arc<BTreeMap<ArcStr, ArcStr>>>,
A: IntoIterator<Item = &'a HashMap<ArcStr, Expr>>,
UP: IntoIterator<Item = &'a HashMap<ArcStr, Pattern>>,
RP: IntoIterator<Item = SmallVec<[(ArcStr, ArcStr); 1]>>,
{
Self {
aliases: aliases.into_iter().map(Arc::clone).collect(),
pats: pats.into_iter().map(Arc::clone).collect(),
default_alias: FlowControl::Error,
default_pat: FlowControl::Error,
aliases: aliases.into_iter().collect(),
unresolved_pats: unresolved_pats.into_iter().collect(),
resolved_pats: resolved_pats.into_iter().flatten().collect(),
}
}
/// Creates a visitor from aliases
pub fn from_aliases<'a, A>(aliases: A) -> Self
where
A: IntoIterator<Item = &'a Arc<IndexMap<ArcStr, Expr>>>,
{
Self::new(aliases, [])
}
/// Visits an identifier
fn visit_ident(&self, name: &str) -> FlowControl<Expr> {
for (pat_name, pat) in &self.resolved_pats {
if name == &**pat_name {
return FlowControl::ExpandTo(Expr::string(pat.clone()));
}
}
/// Sets the default pattern
pub fn with_default_pat(self, default_pat: FlowControl<ArcStr>) -> Self {
Self { default_pat, ..self }
}
for pats in &self.unresolved_pats {
if pats.contains_key(name) {
return FlowControl::Keep;
}
}
/// Visits an alias
fn visit_alias(&self, alias_name: &str) -> FlowControl<&Expr> {
for aliases in &self.aliases {
if let Some(alias) = aliases.get(alias_name) {
return FlowControl::ExpandTo(alias);
if let Some(alias) = aliases.get(name) {
return FlowControl::ExpandTo(alias.clone());
}
}
self.default_alias.as_ref()
}
/// Visits a pattern
fn visit_pat(&self, pat_name: &str) -> FlowControl<&ArcStr> {
for pats in &self.pats {
if let Some(pat) = pats.get(pat_name) {
return FlowControl::ExpandTo(pat);
}
}
self.default_pat.as_ref()
FlowControl::Error
}
}

330
src/lib.rs Normal file
View File

@ -0,0 +1,330 @@
//! `Zbuild` build system
// Features
#![feature(
exit_status_error,
decl_macro,
box_patterns,
yeet_expr,
must_not_suspend,
assert_matches,
try_trait_v2,
if_let_guard,
pattern,
vec_into_raw_parts,
ptr_metadata,
extend_one,
try_blocks,
macro_metavar_expr,
macro_metavar_expr_concat,
substr_range,
unwrap_infallible,
never_type,
try_trait_v2_yeet
)]
// Lints
#![allow(
clippy::print_stdout,
clippy::print_stderr,
reason = "We're a binary that should talk to the user"
)]
// Modules
mod args;
mod ast;
mod build;
mod error;
mod expand;
mod rules;
mod util;
mod watcher;
// Exports
pub use self::{
args::Args,
error::{AppError, ExitResult},
};
// Imports
use {
self::{
ast::Ast,
build::{BuildReason, Builder},
expand::Expander,
rules::Rules,
},
futures::{StreamExt, TryFutureExt, stream::FuturesUnordered},
indicatif::ProgressBar,
smallvec::SmallVec,
std::{
env,
fmt,
path::{Path, PathBuf},
sync::Arc,
thread,
time::{Duration, Instant, SystemTime},
},
util::ArcStr,
watcher::Watcher,
zutil_app_error::Context,
};
/// Runs `zbuild`: locates and parses the manifest, builds the requested
/// (or default) targets and, when watching, keeps rebuilding them.
///
/// # Errors
/// Returns an error if the manifest can't be found or parsed, if the builder
/// can't be created, or if any target fails to build.
#[expect(clippy::too_many_lines, reason = "TODO: Split it up more")]
pub async fn run(args: Args) -> Result<(), AppError> {
	// Find the zbuild location and change the current directory to it
	// TODO: Not adjust the zbuild path and read it before?
	let zbuild_path = match args.zbuild_path {
		Some(path) => path.canonicalize().context("Unable to canonicalize zbuild path")?,
		None => self::find_zbuild().await?,
	};
	tracing::debug!(?zbuild_path, "Found zbuild path");
	let zbuild_dir = zbuild_path.parent().expect("Zbuild path had no parent");
	let zbuild_path = zbuild_path.file_name().expect("Zbuild path had no file name");
	let zbuild_path = Path::new(zbuild_path);
	tracing::debug!(?zbuild_dir, "Moving to zbuild directory");
	env::set_current_dir(zbuild_dir).with_context(|| format!("Unable to set current directory to {zbuild_dir:?}"))?;

	// Parse the ast
	let ast = ast::parse(zbuild_path).context("Unable to parse zbuild file")?;
	tracing::trace!(?ast, "Parsed ast");

	// Create the expander
	let expander = Expander::new();

	// Build the rules
	let rules = Rules::from_ast(ast).context("Unable to build rules")?;
	tracing::trace!(?rules, "Built rules");

	// Get the max number of jobs we can execute at once
	let jobs = match args.jobs {
		Some(0) => {
			tracing::warn!("Cannot use 0 jobs, defaulting to 1");
			1
		},
		Some(jobs) => jobs,
		None => thread::available_parallelism()
			.context("Unable to query system for available parallelism for default number of jobs")?
			.into(),
	};
	tracing::debug!(?jobs, "Concurrent jobs");

	// Then get all targets to build
	let targets_to_build = match args.targets.is_empty() {
		// If none were specified, use the default rules
		true => rules.default.clone(),
		// Else infer them as either rules or files
		// TODO: Maybe be explicit about rule-name inferring?
		//       If a file has the same name as a rule, it may be
		//       unexpected behavior, but we can't just check if the
		//       file exists to disambiguate, because it might not be
		//       created yet
		false => args
			.targets
			.into_iter()
			.map(|target| {
				rules.rules.get(target.as_str()).map_or_else(
					// By default, use a file
					|| rules::Target::File {
						file: rules::Expr::string(target),
						is_static: false,
					},
					// If there was a rule, use it without any patterns
					// TODO: If it requires patterns maybe error out here?
					|rule| rules::Target::Rule {
						rule: rules::Expr::string(rule.name.clone()),
						pats: SmallVec::new(),
					},
				)
			})
			.collect(),
	};
	tracing::trace!(
		targets_to_build = ?targets_to_build.iter().map(<_>::to_string).collect::<Vec<_>>(),
		"Found targets to build"
	);

	// Progress bar shown while building.
	// Note: The steady tick keeps it redrawing even while no target finishes.
	let progress_bar = ProgressBar::new(0).with_style(
		#[expect(
			clippy::literal_string_with_formatting_args,
			reason = "`indicatif` formats these for us dynamically"
		)]
		indicatif::ProgressStyle::default_bar()
			.progress_chars("=> ")
			.template("[{elapsed:>3.green}/{duration:<3.black}] [{bar:50.black}] {human_pos:>7}/{human_len:<7}")
			.expect("Invalid progress bar template"),
	);
	progress_bar.enable_steady_tick(Duration::from_millis(100));

	// Create the builder
	let builder = Builder::new(
		jobs,
		rules,
		expander,
		// Note: We should stop builds on the first error if we're *not* watching and the
		//       user doesn't want to keep going.
		!args.watch && !args.keep_going,
		args.always_build,
		Some(progress_bar.clone()),
	)
	.context("Unable to create builder")?;
	let builder = Arc::new(builder);

	// Then create the watcher, if we're watching
	let watcher = args
		.watch
		.then(|| {
			// TODO: Better default?
			let debouncer_timeout_ms = args.watcher_debouncer_timeout_ms.unwrap_or(10.0_f64);
			let debouncer_timeout = { Duration::from_secs_f64(debouncer_timeout_ms / 1000.0) };
			Watcher::new(builder.subscribe_events(), debouncer_timeout)
		})
		.transpose()?;

	// Finally build all targets and start watching
	// Note: The two futures run concurrently; builds report failures paired
	//       with their target, while the watcher (if any) rebuilds on events.
	let start_time = Instant::now();
	let (failed_targets, ()) = futures::join!(
		async {
			targets_to_build
				.iter()
				.map(|target| {
					self::build_target(&builder, target, args.ignore_missing).map_err(|err| (target.clone(), err))
				})
				.collect::<FuturesUnordered<_>>()
				.collect::<Vec<Result<(), _>>>()
				.await
				.into_iter()
				.filter_map(Result::err)
				.collect::<Vec<_>>()
		},
		async {
			if let Some(watcher) = watcher {
				tracing::info!("Starting to watch for all targets");
				watcher.watch_rebuild(&builder, args.ignore_missing).await;
			}
		}
	);
	let elapsed = start_time.elapsed();
	progress_bar.finish_and_clear();

	// Finally print some statistics
	let targets = builder.build_results().await;
	let total_targets = targets.len();
	let built_targets = targets
		.iter()
		.filter_map(|(_, res)| res.as_ref())
		.filter(|res| res.as_ref().is_ok_and(|res| res.built))
		.count();
	tracing::info!("Built {built_targets} targets in {elapsed:.2?}");
	tracing::info!("Checked {total_targets} targets in {elapsed:.2?}");

	match failed_targets.is_empty() {
		true => Ok(()),
		false => {
			tracing::error!("One or more builds failed:");
			for (target, err) in failed_targets {
				tracing::error!(err=%error::pretty(&err), "Failed to build target {target}");
			}
			Err(AppError::msg("Exiting with non-0 due to failed builds"))
		},
	}
}
/// Finds the nearest zbuild file
///
/// Walks from the current directory up through its ancestors and returns
/// the first `zbuild.zb` found.
async fn find_zbuild() -> Result<PathBuf, AppError> {
	let start_dir = env::current_dir().context("Unable to get current directory")?;
	let mut dir = start_dir.as_path();

	loop {
		// Check for a manifest directly inside `dir`.
		let zbuild_path = dir.join("zbuild.zb");
		let exists = util::fs_try_exists_symlink(&zbuild_path)
			.await
			.with_context(|| format!("Unable to check if file exists {zbuild_path:?}"))?;
		if exists {
			return Ok(zbuild_path);
		}

		// Otherwise go up a directory, giving up once we reach the root.
		match dir.parent() {
			Some(parent) => dir = parent,
			None => zutil_app_error::bail!(
				"No `zbuild.zb` file found in current or parent directories.\nYou can use `--path \
				 {{zbuild-path}}` in order to specify the manifest's path"
			),
		}
	}
}
/// Builds a target.
///
/// Delegates the actual build to [`BuildableTargetInner::build`]. On success,
/// a target that was actually (re)built (rather than already up to date) is
/// logged and printed to stdout; on failure, the error is logged and
/// propagated to the caller.
async fn build_target<T: BuildableTargetInner + fmt::Display + fmt::Debug>(
	builder: &Arc<Builder>,
	target: &rules::Target<T>,
	ignore_missing: bool,
) -> Result<(), AppError> {
	tracing::debug!(%target, "Building target");

	// Try to build the target, remembering when we started so we can
	// report the build duration afterwards.
	let build_start_time = SystemTime::now();
	let build_res = match T::build(target, builder, ignore_missing, BuildReason::empty()).await {
		Ok(build_res) => build_res,
		Err(err) => {
			tracing::error!(%target, err=%error::pretty(&err), "Unable to build target");
			return Err(err);
		},
	};

	// If we actually built the rule, and it didn't just exist, log it
	if build_res.built {
		let build_duration = build_res
			.build_time
			.duration_since(build_start_time)
			.unwrap_or(Duration::ZERO);
		tracing::debug!("Built target {target} in {build_duration:.2?}");
		println!("{target}");
	}

	Ok(())
}
/// A buildable target inner type.
///
/// Abstracts over the payload carried by a `rules::Target` (here implemented
/// for `rules::Expr` and `ArcStr`) so `build_target` can build either kind
/// through a single code path.
trait BuildableTargetInner: Sized {
	/// Builds this target via `builder`, forwarding `ignore_missing` and the
	/// build `reason`, and returning the resulting `build::BuildResult`.
	async fn build(
		target: &rules::Target<Self>,
		builder: &Arc<Builder>,
		ignore_missing: bool,
		reason: BuildReason,
	) -> Result<build::BuildResult, AppError>;
}
impl BuildableTargetInner for rules::Expr {
	/// Builds an expression target by delegating to the builder's
	/// `build_expr`, keeping only the build result.
	async fn build(
		target: &rules::Target<Self>,
		builder: &Arc<Builder>,
		ignore_missing: bool,
		reason: BuildReason,
	) -> Result<build::BuildResult, AppError> {
		let (build_res, _) = builder.build_expr(target, ignore_missing, reason).await?;
		Ok(build_res)
	}
}
impl BuildableTargetInner for ArcStr {
	/// Builds a string target by delegating to the builder's `build`,
	/// keeping only the build result.
	async fn build(
		target: &rules::Target<Self>,
		builder: &Arc<Builder>,
		ignore_missing: bool,
		reason: BuildReason,
	) -> Result<build::BuildResult, AppError> {
		let (build_res, _) = builder.build(target, ignore_missing, reason).await?;
		Ok(build_res)
	}
}

View File

@ -7,7 +7,7 @@ mod pre_init;
// Imports
use {
anyhow::Context,
crate::AppError,
std::{
env::{self, VarError},
fs,
@ -16,7 +16,8 @@ use {
sync::Mutex,
},
tracing::metadata::LevelFilter,
tracing_subscriber::{prelude::*, EnvFilter, Registry},
tracing_subscriber::{EnvFilter, Registry, prelude::*},
zutil_app_error::Context,
};
/// Initializes the logger
@ -96,7 +97,7 @@ where
}
/// Creates the file layer
fn file_layer<S>(log_path: &Path) -> Result<impl tracing_subscriber::Layer<S>, anyhow::Error>
fn file_layer<S>(log_path: &Path) -> Result<impl tracing_subscriber::Layer<S>, AppError>
where
S: tracing::Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span> + 'static,
{

View File

@ -4,8 +4,8 @@
// Imports
use std::sync::{
atomic::{self, AtomicBool},
Mutex,
atomic::{self, AtomicBool},
};
/// If logging as already initialized

View File

@ -5,16 +5,12 @@
exit_status_error,
decl_macro,
box_patterns,
async_closure,
let_chains,
yeet_expr,
must_not_suspend,
strict_provenance,
assert_matches,
try_trait_v2,
if_let_guard,
pattern,
unsigned_signed_diff,
vec_into_raw_parts,
ptr_metadata
)]
@ -26,45 +22,18 @@
)]
// Modules
mod args;
mod ast;
mod build;
mod error;
mod expand;
mod logger;
mod rules;
mod util;
mod watcher;
// Imports
use {
self::{
ast::Ast,
build::{BuildReason, Builder},
error::{AppError, ExitResult},
expand::Expander,
rules::Rules,
},
anyhow::Context,
args::Args,
clap::Parser,
futures::{stream::FuturesUnordered, StreamExt, TryFutureExt},
std::{
collections::BTreeMap,
env,
fmt,
fs,
path::{Path, PathBuf},
sync::{
atomic::{self, AtomicUsize},
Arc,
},
thread,
time::{Duration, SystemTime},
sync::atomic::{self, AtomicUsize},
},
tokio::runtime,
util::ArcStr,
watcher::Watcher,
zbuild::{AppError, Args, ExitResult},
zutil_app_error::Context,
};
#[expect(
@ -105,247 +74,8 @@ fn main() -> ExitResult {
}
}
let runtime = runtime_builder
.build()
.context("Failed building the Runtime")
.map_err(AppError::Other)?;
let runtime = runtime_builder.build().context("Failed building the Runtime")?;
runtime.block_on(self::run(args))
}
#[expect(clippy::too_many_lines, reason = "TODO: Split it up more")]
async fn run(args: Args) -> ExitResult {
// Find the zbuild location and change the current directory to it
// TODO: Not adjust the zbuild path and read it before?
let zbuild_path = match args.zbuild_path {
Some(path) => path,
None => self::find_zbuild().await?,
};
tracing::debug!(?zbuild_path, "Found zbuild path");
let zbuild_dir = zbuild_path.parent().expect("Zbuild path had no parent");
let zbuild_path = zbuild_path.file_name().expect("Zbuild path had no file name");
let zbuild_path = Path::new(zbuild_path);
tracing::debug!(?zbuild_dir, "Moving to zbuild directory");
env::set_current_dir(zbuild_dir).map_err(AppError::set_current_dir(zbuild_dir))?;
// Parse the ast
let zbuild_file = fs::read_to_string(zbuild_path).map_err(AppError::read_file(&zbuild_path))?;
let zbuild_file = ArcStr::from(zbuild_file);
tracing::trace!(?zbuild_file, "Read zbuild.yaml");
let ast = serde_yaml::from_str::<Ast<'_>>(&zbuild_file).map_err(AppError::parse_yaml(&zbuild_path))?;
tracing::trace!(?ast, "Parsed ast");
// Create the expander
let expander = Expander::new();
// Build the rules
let rules = Rules::from_ast(&zbuild_file, ast);
tracing::trace!(?rules, "Built rules");
// Get the max number of jobs we can execute at once
let jobs = match args.jobs {
Some(0) => {
tracing::warn!("Cannot use 0 jobs, defaulting to 1");
1
},
Some(jobs) => jobs,
None => thread::available_parallelism()
.map_err(AppError::get_default_jobs())?
.into(),
};
tracing::debug!(?jobs, "Concurrent jobs");
// Then get all targets to build
let targets_to_build = match args.targets.is_empty() {
// If none were specified, use the default rules
true => rules.default.clone(),
// Else infer them as either rules or files
// TODO: Maybe be explicit about rule-name inferring?
// If a file has the same name as a rule, it may be
// unexpected behavior, but we can't just check if the
// file exists to disambiguate, because it might not be
// created yet
false => args
.targets
.into_iter()
.map(|target| {
rules.rules.get(target.as_str()).map_or_else(
// By default, use a file
|| rules::Target::File {
file: rules::Expr::string(target),
is_static: false,
},
// If there was a rule, use it without any patterns
// TODO: If it requires patterns maybe error out here?
|rule| rules::Target::Rule {
rule: rules::Expr::string(rule.name.clone()),
pats: Arc::new(BTreeMap::new()),
},
)
})
.collect(),
};
tracing::trace!(
targets_to_build = ?targets_to_build.iter().map(<_>::to_string).collect::<Vec<_>>(),
"Found targets to build"
);
// Create the builder
// Note: We should stop builds on the first error if we're *not* watching.
let builder = Builder::new(jobs, rules, expander, !args.watch)
.context("Unable to create builder")
.map_err(AppError::Other)?;
let builder = Arc::new(builder);
// Then create the watcher, if we're watching
let watcher = args
.watch
.then(|| {
// TODO: Better default?
let debouncer_timeout_ms = args.watcher_debouncer_timeout_ms.unwrap_or(10.0_f64);
let debouncer_timeout = { Duration::from_secs_f64(debouncer_timeout_ms / 1000.0) };
Watcher::new(builder.subscribe_events(), debouncer_timeout)
})
.transpose()?;
// Finally build all targets and start watching
let (failed_targets, ()) = futures::join!(
async {
targets_to_build
.iter()
.map(|target| {
self::build_target(&builder, target, args.ignore_missing).map_err(|err| (target.clone(), err))
})
.collect::<FuturesUnordered<_>>()
.collect::<Vec<Result<(), _>>>()
.await
.into_iter()
.filter_map(Result::err)
.collect::<Vec<_>>()
},
async {
if let Some(watcher) = watcher {
tracing::info!("Starting to watch for all targets");
watcher.watch_rebuild(&builder, args.ignore_missing).await;
}
}
);
// Finally print some statistics
let targets = builder.build_results().await;
let total_targets = targets.len();
let built_targets = targets
.iter()
.filter_map(|(_, res)| res.as_ref())
.filter(|res| res.as_ref().map_or(false, |res| res.built))
.count();
tracing::info!("Built {built_targets} targets");
tracing::info!("Checked {total_targets} targets");
match failed_targets.is_empty() {
true => ExitResult::Ok,
false => {
tracing::error!("One or more builds failed:");
for (target, err) in failed_targets {
tracing::error!(err=%err.pretty(), "Failed to build target {target}");
}
ExitResult::Err(AppError::ExitDueToFailedBuilds {})
},
}
}
/// Finds the nearest zbuild file
async fn find_zbuild() -> Result<PathBuf, AppError> {
let cur_path = env::current_dir().map_err(AppError::get_current_dir())?;
let mut cur_path = cur_path.as_path();
loop {
let zbuild_path = cur_path.join("zbuild.yaml");
match util::fs_try_exists_symlink(&zbuild_path)
.await
.map_err(AppError::check_file_exists(&zbuild_path))?
{
true => return Ok(zbuild_path),
false => match cur_path.parent() {
Some(parent) => cur_path = parent,
None => return Err(AppError::ZBuildNotFound {}),
},
}
}
}
/// Builds a target.
async fn build_target<T: BuildableTargetInner + fmt::Display + fmt::Debug>(
builder: &Arc<Builder>,
target: &rules::Target<T>,
ignore_missing: bool,
) -> Result<(), AppError> {
tracing::debug!(%target, "Building target");
// Try to build the target
let build_start_time = SystemTime::now();
let res = T::build(target, builder, ignore_missing, BuildReason::empty()).await;
// Then check the status
match res {
Ok(build_res) => {
// If we actually built the rule, and it didn't just exist, log it
if build_res.built {
let build_duration = build_res
.build_time
.duration_since(build_start_time)
.unwrap_or(Duration::ZERO);
tracing::debug!("Built target {target} in {build_duration:.2?}");
println!("{target}");
}
Ok(())
},
Err(err) => {
tracing::error!(%target, err=%err.pretty(), "Unable to build target");
Err(err)
},
}
}
/// A buildable target inner type
trait BuildableTargetInner: Sized {
/// Builds this target
async fn build(
target: &rules::Target<Self>,
builder: &Arc<Builder>,
ignore_missing: bool,
reason: BuildReason,
) -> Result<build::BuildResult, AppError>;
}
impl BuildableTargetInner for rules::Expr {
async fn build(
target: &rules::Target<Self>,
builder: &Arc<Builder>,
ignore_missing: bool,
reason: BuildReason,
) -> Result<build::BuildResult, AppError> {
builder
.build_expr(target, ignore_missing, reason)
.await
.map(|(build_res, _)| build_res)
}
}
impl BuildableTargetInner for ArcStr {
async fn build(
target: &rules::Target<Self>,
builder: &Arc<Builder>,
ignore_missing: bool,
reason: BuildReason,
) -> Result<build::BuildResult, AppError> {
builder
.build(target, ignore_missing, reason)
.await
.map(|(build_res, _)| build_res)
}
runtime.block_on(zbuild::run(args))?;
ExitResult::Ok
}

View File

@ -1,7 +1,6 @@
//! Rules
// Modules
mod alias;
mod expr;
mod item;
mod pattern;
@ -10,19 +9,17 @@ mod target;
// Exports
pub use {
alias::AliasOp,
expr::{Expr, ExprCmpt, ExprTree},
expr::{Expr, ExprCmpt, ExprOp, ExprTree},
item::{DepItem, OutItem},
pattern::PatternOp,
rule::{Command, Exec, Rule},
pattern::Pattern,
rule::{Command, Rule},
target::Target,
};
// Imports
use {
crate::{util::ArcStr, Ast},
indexmap::IndexMap,
std::sync::Arc,
crate::{AppError, Ast, util::ArcStr},
std::collections::HashMap,
};
/// Rules.
@ -35,42 +32,56 @@ pub struct Rules {
///
/// These are available for the whole program to
/// use.
pub aliases: Arc<IndexMap<ArcStr, Expr>>,
pub aliases: HashMap<ArcStr, Expr>,
/// Patterns.
///
/// These are available for the whole program to
/// use.
pub pats: HashMap<ArcStr, Pattern>,
/// Default targets to build
pub default: Vec<Target<Expr>>,
/// Rules
pub rules: IndexMap<ArcStr, Rule<Expr>>,
#[expect(clippy::struct_field_names, reason = "TODO: Rename struct name")]
pub rules: HashMap<ArcStr, Rule<Expr>>,
}
impl Rules {
/// Creates all rules from the ast
#[must_use]
pub fn from_ast(zbuild_file: &ArcStr, ast: Ast<'_>) -> Self {
pub fn from_ast(ast: Ast) -> Result<Self, AppError> {
let aliases = ast
.aliases
.into_iter()
.map(|(alias, value)| (zbuild_file.slice_from_str(alias), Expr::from_ast(zbuild_file, value)))
.map(|alias| (alias.name.0, Expr::from_ast(alias.value)))
.collect();
let pats = ast
.pats
.into_iter()
.map(|pat| {
(pat.name.0.clone(), Pattern {
name: pat.name.0,
non_empty: pat.non_empty,
})
})
.collect();
let default = ast
.default
.defaults
.into_iter()
.map(|target| Target::from_ast(zbuild_file, target))
.map(|target| Target::from_ast(target.default))
.collect();
let rules = ast
.rules
.into_iter()
.map(|(name, rule)| {
let name = zbuild_file.slice_from_str(name);
(name.clone(), Rule::from_ast(zbuild_file, name, rule))
})
.collect();
.map(|rule| try { (rule.name.0.clone(), Rule::from_ast(rule)?) })
.collect::<Result<_, AppError>>()?;
Self {
aliases: Arc::new(aliases),
Ok(Self {
aliases,
pats,
default,
rules,
}
})
}
}

View File

@ -1,45 +0,0 @@
//! Pattern
// Imports
use {crate::util::ArcStr, std::fmt};
/// Alias
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash, Debug)]
pub struct Alias {
/// Alias name
pub name: ArcStr,
/// Operators
pub ops: Vec<AliasOp>,
}
/// Alias operator
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash, Debug)]
pub enum AliasOp {
/// Directory name
DirName,
}
impl fmt::Display for Alias {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "$({}", self.name)?;
for op in &self.ops {
write!(f, "::{op}")?;
}
write!(f, ")")?;
Ok(())
}
}
impl fmt::Display for AliasOp {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::DirName => write!(f, "dir_name"),
}
}
}

View File

@ -8,10 +8,6 @@ pub use self::expr_tree::ExprTree;
// Imports
use {
super::{
alias::{Alias, AliasOp},
pattern::{Pattern, PatternOp},
},
crate::{ast, util::ArcStr},
std::fmt,
};
@ -22,11 +18,14 @@ pub enum ExprCmpt {
/// String
String(ArcStr),
/// Pattern
Pattern(Pattern),
/// Identifier
Ident {
/// Name
name: ArcStr,
/// Alias
Alias(Alias),
/// Operators
ops: Vec<ExprOp>,
},
}
impl ExprCmpt {
@ -39,35 +38,17 @@ impl ExprCmpt {
/// Returns this expression as a string, if it is one.
#[must_use]
pub const fn as_string(&self) -> Option<&ArcStr> {
#[expect(clippy::wildcard_enum_match_arm, reason = "We only care about a specific variant")]
match self {
Self::String(v) => Some(v),
_ => None,
Self::Ident { .. } => None,
}
}
/// Converts this component into a string, if it's a string.
pub fn try_into_string(self) -> Result<ArcStr, Self> {
#[expect(clippy::wildcard_enum_match_arm, reason = "We only care about a specific variant")]
match self {
Self::String(v) => Ok(v),
_ => Err(self),
}
}
/// Returns `true` if the component is [`ExprCmpt::Pattern`].
#[must_use]
pub const fn is_pattern(&self) -> bool {
matches!(self, Self::Pattern(_))
}
/// Returns this expression as a pattern, if it is one.
#[must_use]
pub const fn as_pattern(&self) -> Option<&Pattern> {
#[expect(clippy::wildcard_enum_match_arm, reason = "We only care about a specific variant")]
match self {
Self::Pattern(v) => Some(v),
_ => None,
Self::Ident { .. } => Err(self),
}
}
}
@ -99,7 +80,7 @@ impl Expr {
// If it's a string, try to use `push_str` for merging strings.
ExprCmpt::String(s) => self.push_str(s),
cmpt @ (ExprCmpt::Alias(_) | ExprCmpt::Pattern(_)) => self.cmpts.push(cmpt.clone()),
cmpt @ ExprCmpt::Ident { .. } => self.cmpts.push(cmpt.clone()),
}
}
@ -126,7 +107,7 @@ impl Expr {
// Otherwise, get the first string, if any, then push all other strings
let mut cmpts = self.cmpts.into_iter();
let Some(output) = cmpts.next() else {
return Ok("".into());
return Ok(String::new().into());
};
let mut output = output.try_into_string().expect("Component wasn't a string");
@ -142,30 +123,21 @@ impl Expr {
}
/// Creates a new expression from it's ast
pub fn from_ast(zbuild_file: &ArcStr, expr: ast::Expr<'_>) -> Self {
pub fn from_ast(expr: ast::Expr) -> Self {
let cmpts = expr
.cmpts
.into_iter()
.map(|cmpt| match cmpt {
ast::ExprCmpt::String(s) => ExprCmpt::String(zbuild_file.slice_from_str(s)),
ast::ExprCmpt::Pattern(ast::Pattern { name, ops }) => ExprCmpt::Pattern(Pattern {
name: zbuild_file.slice_from_str(name),
ast::ExprCmpt::String(s) => ExprCmpt::String(s),
ast::ExprCmpt::Ident { ident, ops } => ExprCmpt::Ident {
name: ident.0,
ops: ops
.into_iter()
.map(|op| match op {
ast::PatternOp::NonEmpty => PatternOp::NonEmpty,
ast::ExprOp::DirName => ExprOp::DirName,
})
.collect(),
}),
ast::ExprCmpt::Alias(ast::Alias { name, ops }) => ExprCmpt::Alias(Alias {
name: zbuild_file.slice_from_str(name),
ops: ops
.into_iter()
.map(|op| match op {
ast::AliasOp::DirName => AliasOp::DirName,
})
.collect(),
}),
},
})
.collect();
@ -200,8 +172,31 @@ impl fmt::Display for ExprCmpt {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::String(s) => write!(f, "{s}"),
Self::Pattern(pat) => write!(f, "{pat}"),
Self::Alias(alias) => write!(f, "{alias}"),
Self::Ident { name, ops } => {
write!(f, "{{{name}")?;
for op in ops {
match op {
ExprOp::DirName => write!(f, ".dir_name")?,
}
}
write!(f, "}}")?;
Ok(())
},
}
}
}
/// Expression operators
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash, Debug)]
pub enum ExprOp {
/// Directory name, `.dir_name`.
DirName,
}
impl fmt::Display for ExprOp {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::DirName => write!(f, ".dir_name"),
}
}
}

View File

@ -2,13 +2,10 @@
use {
super::Expr,
crate::{
error::AppError,
rules::{pattern::Pattern, PatternOp},
util::ArcStr,
},
itertools::{Itertools, PeekingNext},
std::collections::BTreeMap,
crate::{AppError, rules::pattern::Pattern, util::ArcStr},
itertools::PeekingNext,
smallvec::SmallVec,
std::collections::HashMap,
};
/// An expression tree.
@ -16,65 +13,70 @@ use {
// Try to actually create a fast, non `O(n)`, algorithm?
#[derive(Debug)]
pub struct ExprTree<K> {
/// Prefixes
prefixes: PrefixTree<K>,
/// Matches
matches: HashMap<(ArcStr, ArcStr), (K, Option<Pattern>)>,
}
// TODO: Flatten this?
type PrefixTree<K> = BTreeMap<ArcStr, SuffixTree<K>>;
type SuffixTree<K> = BTreeMap<ArcStr, (Option<Pattern>, K)>;
impl<K> ExprTree<K> {
/// Creates a new, empty, expression tree
pub const fn new() -> Self {
pub fn new() -> Self {
Self {
prefixes: BTreeMap::new(),
matches: HashMap::new(),
}
}
/// Adds an expression to the suffix tree, associated with a key.
/// Adds an expression to the expression tree, associated with a key.
///
/// The expression must not contain any aliases.
///
/// Returns the old key if the expression already existed.
pub fn insert(&mut self, expr: &Expr, key: K) -> Result<Option<K>, AppError> {
pub fn insert(&mut self, expr: &Expr, key: K, pats: &[&HashMap<ArcStr, Pattern>]) -> Result<Option<K>, AppError> {
let mut cmpts = expr.cmpts.iter();
// Get all components from the start that are strings
let prefix = cmpts
.by_ref()
.peeking_take_while(|cmpt| cmpt.is_string())
.map(|cmpt| &**cmpt.as_string().expect("Just checked"))
.collect::<String>();
let prefix = ArcStr::from(prefix);
.peeking_next(|cmpt| cmpt.is_string())
.map(|cmpt| cmpt.as_string().expect("Just checked"))
.cloned()
.unwrap_or_default();
// Get the (possible) pattern in the middle
let pat = cmpts
.peeking_next(|cmpt| cmpt.is_pattern())
.map(|cmpt| cmpt.as_pattern().expect("Just checked"))
.peeking_next(|cmpt| match cmpt {
super::ExprCmpt::String(_) => false,
super::ExprCmpt::Ident { name, .. } => pats.iter().any(|pats| pats.contains_key(name)),
})
.map(|cmpt| match cmpt {
super::ExprCmpt::String(_) => unreachable!("Just checked"),
super::ExprCmpt::Ident { name, .. } => {
for pats in pats {
if let Some(pat) = pats.get(name) {
return pat;
}
}
unreachable!("Just checked")
},
})
.cloned();
// Then get the rest of the string
let suffix = cmpts
.peeking_take_while(|cmpt| cmpt.is_string())
.map(|cmpt| &**cmpt.as_string().expect("Just checked"))
.collect::<String>();
let suffix = ArcStr::from(suffix);
.peeking_next(|cmpt| cmpt.is_string())
.map(|cmpt| cmpt.as_string().expect("Just checked"))
.cloned()
.unwrap_or_default();
// After this the expression should be empty
if let Some(cmpt) = cmpts.next() {
return Err(AppError::Other(anyhow::anyhow!(
"Unexpected component in expression {expr}: {cmpt}"
)));
zutil_app_error::bail!("Unexpected component in expression {expr}: {cmpt}");
}
// Finally try to insert and retrieve the old key, if any.
let old_key = self
.prefixes
.entry(prefix)
.or_default()
.insert(suffix, (pat, key))
.map(|(_, old_key)| old_key);
.matches
.insert((prefix, suffix), (key, pat))
.map(|(old_key, _)| old_key);
Ok(old_key)
}
@ -82,50 +84,23 @@ impl<K> ExprTree<K> {
/// Matches a string against this expression tree.
///
/// Returns the first match with patterns resolved.
// TODO: Since we only support a single pattern, return `Option<ArcStr>` for it instead.
pub fn find(&self, value: &str) -> Option<(K, BTreeMap<ArcStr, ArcStr>)>
pub fn find(&self, value: &ArcStr) -> Option<(K, Patterns)>
where
K: Clone,
{
for (prefix, suffixes) in &self.prefixes {
for ((prefix, suffix), (key, pat)) in &self.matches {
// If the prefix no longer matches, try the next
let Some(value_rest) = value.strip_prefix(&**prefix) else {
let Some(value) = value.strip_prefix(&**prefix) else {
continue;
};
// Try to find match the suffixes
if let Some((key, pats)) = Self::find_match_suffix(value_rest, suffixes) {
return Some((key, pats));
}
}
None
}
/// Finds a matching suffix for `value` from the suffix map.
fn find_match_suffix(value: &str, suffixes: &SuffixTree<K>) -> Option<(K, BTreeMap<ArcStr, ArcStr>)>
where
K: Clone,
{
// Try to match against an empty suffix
// Note: We always do this, since some expressions could be
// of the form `(String, Pat, Empty)`, with `Pat` matching
// the rest.
if let Some((pat, key)) = suffixes.get("") &&
let Some(pats) = Self::find_match_pat("", pat)
{
return Some((key.clone(), pats));
}
// Otherwise, match against all other suffixes
for (suffix, (pat, key)) in suffixes {
// If the prefix no longer matches, try the next
let Some(pat_value) = value.strip_suffix(&**suffix) else {
let Some(value) = value.strip_suffix(&**suffix) else {
continue;
};
// Otherwise, we might have found the final value, so test it
if let Some(pats) = Self::find_match_pat(pat_value, pat) {
if let Some(pats) = Self::find_match_pat(&value, pat.as_ref()) {
return Some((key.clone(), pats));
}
}
@ -134,26 +109,20 @@ impl<K> ExprTree<K> {
}
/// Matches a pattern against a remaining value after it's prefix and suffix have been stripped
fn find_match_pat(value: &str, pat: &Option<Pattern>) -> Option<BTreeMap<ArcStr, ArcStr>> {
fn find_match_pat(value: &ArcStr, pat: Option<&Pattern>) -> Option<Patterns> {
let pats = match pat {
// If there is any pattern, try to match it
Some(pat) => {
for op in &pat.ops {
match op {
// If it needs to be non-empty, check
PatternOp::NonEmpty => match value.is_empty() {
true => return None,
false => continue,
},
}
if pat.non_empty && value.is_empty() {
return None;
}
BTreeMap::from([(pat.name.clone(), value.into())])
SmallVec::from([(pat.name.clone(), value.clone())])
},
// Otherwise, we match if the value is empty
_ => match value.is_empty() {
true => BTreeMap::new(),
None => match value.is_empty() {
true => SmallVec::new(),
false => return None,
},
};
@ -161,3 +130,6 @@ impl<K> ExprTree<K> {
Some(pats)
}
}
/// Patterns
pub type Patterns = SmallVec<[(ArcStr, ArcStr); 1]>;

View File

@ -3,8 +3,8 @@
// Imports
use {
super::Expr,
crate::{ast, util::ArcStr},
std::{collections::BTreeMap, fmt, sync::Arc},
crate::{AppError, ast, util::ArcStr},
std::fmt,
};
@ -23,17 +23,15 @@ pub enum OutItem<T> {
impl OutItem<Expr> {
/// Creates a new item from it's `ast`.
pub fn from_ast(zbuild_file: &ArcStr, item: ast::OutItem<'_>) -> Self {
match item {
ast::OutItem::File(file) => Self::File {
file: Expr::from_ast(zbuild_file, file),
is_deps_file: false,
},
ast::OutItem::DepsFile { deps_file } => Self::File {
file: Expr::from_ast(zbuild_file, deps_file),
is_deps_file: true,
},
}
pub fn from_ast(item: ast::Expr) -> Result<Self, AppError> {
let is_deps_file = item.is_deps_file;
zutil_app_error::ensure!(!item.is_opt, "Output items cannot be optional");
zutil_app_error::ensure!(!item.is_static, "Output items cannot be static");
Ok(Self::File {
file: Expr::from_ast(item),
is_deps_file,
})
}
}
@ -73,68 +71,28 @@ pub enum DepItem<T> {
/// Rule
Rule {
/// Rule name
name: T,
/// All rule patterns
pats: Arc<BTreeMap<T, T>>,
/// Name
name: ArcStr,
},
}
impl DepItem<Expr> {
/// Creates a new item from it's `ast`.
pub fn from_ast(zbuild_file: &ArcStr, item: ast::DepItem<'_>) -> Self {
match item {
ast::DepItem::File(file) => Self::File {
file: Expr::from_ast(zbuild_file, file),
is_optional: false,
is_static: false,
is_deps_file: false,
},
ast::DepItem::Rule { rule, pats } => {
let pats = pats
.into_iter()
.map(|(pat, value)| (Expr::from_ast(zbuild_file, pat), Expr::from_ast(zbuild_file, value)))
.collect();
Self::Rule {
name: Expr::from_ast(zbuild_file, rule),
pats: Arc::new(pats),
pub fn from_ast(dep: ast::DepStmt) -> Self {
match dep {
ast::DepStmt::File(dep) => {
let is_optional = dep.is_opt;
let is_static = dep.is_static;
let is_deps_file = dep.is_deps_file;
Self::File {
file: Expr::from_ast(dep),
is_optional,
is_static,
is_deps_file,
}
},
ast::DepItem::DepsFile { deps_file } => Self::File {
file: Expr::from_ast(zbuild_file, deps_file),
is_optional: false,
is_static: false,
is_deps_file: true,
},
ast::DepItem::Static { item: static_item } => match static_item {
ast::StaticDepItem::File(file) => Self::File {
file: Expr::from_ast(zbuild_file, file),
is_optional: false,
is_static: true,
is_deps_file: false,
},
ast::StaticDepItem::DepsFile { deps_file } => Self::File {
file: Expr::from_ast(zbuild_file, deps_file),
is_optional: false,
is_static: true,
is_deps_file: true,
},
},
ast::DepItem::Opt { item: opt_item } => match opt_item {
ast::OptDepItem::File(file) => Self::File {
file: Expr::from_ast(zbuild_file, file),
is_optional: true,
is_static: true,
is_deps_file: false,
},
ast::OptDepItem::DepsFile { deps_file } => Self::File {
file: Expr::from_ast(zbuild_file, deps_file),
is_optional: true,
is_static: true,
is_deps_file: true,
},
},
ast::DepStmt::Rule(name) => Self::Rule { name: name.0 },
}
}
}
@ -149,34 +107,22 @@ impl<T: fmt::Display> fmt::Display for DepItem<T> {
is_deps_file,
} => {
if is_optional {
write!(f, "opt: ")?;
write!(f, "opt ")?;
}
if is_static {
write!(f, "static: ")?;
write!(f, "static ")?;
}
if is_deps_file {
write!(f, "deps_file: ")?;
write!(f, "deps_file ")?;
}
write!(f, "{file}")?;
Ok(())
},
Self::Rule { ref name, ref pats } => {
write!(f, "rule: {name}")?;
if !pats.is_empty() {
write!(f, " (")?;
for (pat, value) in &**pats {
write!(f, "{pat}={value}, ")?;
}
write!(f, ")")?;
}
Ok(())
Self::Rule { ref name } => {
write!(f, "rule {name}")
},
}
}

View File

@ -9,23 +9,16 @@ pub struct Pattern {
/// Pattern name
pub name: ArcStr,
/// Operators
pub ops: Vec<PatternOp>,
}
/// Pattern operator
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash, Debug)]
pub enum PatternOp {
/// Non-empty
NonEmpty,
pub non_empty: bool,
}
impl fmt::Display for Pattern {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "^({}", self.name)?;
for op in &self.ops {
write!(f, "::{op}")?;
if self.non_empty {
write!(f, "::non_empty")?;
}
write!(f, ")")?;
@ -34,11 +27,3 @@ impl fmt::Display for Pattern {
Ok(())
}
}
impl fmt::Display for PatternOp {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::NonEmpty => write!(f, "non_empty"),
}
}
}

View File

@ -2,10 +2,9 @@
// Imports
use {
super::{DepItem, Expr, OutItem},
crate::{ast, util::ArcStr},
indexmap::IndexMap,
std::sync::Arc,
super::{DepItem, Expr, OutItem, pattern::Pattern},
crate::{AppError, ast, util::ArcStr},
std::collections::HashMap,
};
/// Rule
@ -15,7 +14,10 @@ pub struct Rule<T> {
pub name: ArcStr,
/// Aliases
pub aliases: Arc<IndexMap<ArcStr, T>>,
pub aliases: HashMap<ArcStr, Expr>,
/// Patterns
pub pats: HashMap<ArcStr, Pattern>,
/// Output items
pub output: Vec<OutItem<T>>,
@ -24,36 +26,45 @@ pub struct Rule<T> {
pub deps: Vec<DepItem<T>>,
/// Execution
pub exec: Exec<T>,
pub exec: Exec<Expr>,
}
impl Rule<Expr> {
/// Creates a new rule from it's ast
pub fn from_ast(zbuild_file: &ArcStr, name: ArcStr, rule: ast::Rule<'_>) -> Self {
pub fn from_ast(rule: ast::RuleStmt) -> Result<Self, AppError> {
let aliases = rule
.aliases
.into_iter()
.map(|(alias, expr)| (zbuild_file.slice_from_str(alias), Expr::from_ast(zbuild_file, expr)))
.map(|alias| (alias.name.0, Expr::from_ast(alias.value)))
.collect();
let pats = rule
.pats
.into_iter()
.map(|pat| {
(pat.name.0.clone(), Pattern {
name: pat.name.0,
non_empty: false,
})
})
.collect();
let output = rule
.out
.into_iter()
.map(|out| OutItem::from_ast(zbuild_file, out))
.collect();
let deps = rule
.deps
.into_iter()
.map(|dep| DepItem::from_ast(zbuild_file, dep))
.collect();
let exec = Exec::from_ast(zbuild_file, rule.exec);
.map(OutItem::from_ast)
.collect::<Result<_, AppError>>()?;
let deps = rule.deps.into_iter().map(DepItem::from_ast).collect();
let exec = Exec {
cmds: rule.exec.into_iter().map(Command::from_ast).collect(),
};
Self {
name,
aliases: Arc::new(aliases),
Ok(Self {
name: rule.name.0,
aliases,
pats,
output,
deps,
exec,
}
})
}
}
@ -65,42 +76,26 @@ pub struct Exec<T> {
pub cmds: Vec<Command<T>>,
}
impl Exec<Expr> {
/// Creates a new exec from it's ast
pub fn from_ast(zbuild_file: &ArcStr, exec: ast::Exec<'_>) -> Self {
Self {
cmds: exec
.cmds
.into_iter()
.map(|cmd| Command::from_ast(zbuild_file, cmd))
.collect(),
}
}
}
/// Command
#[derive(Clone, Debug)]
pub struct Command<T> {
/// Working directory
pub cwd: Option<T>,
/// Stdout
pub stdout: Option<ArcStr>,
/// All arguments
pub args: Vec<T>,
}
impl Command<Expr> {
/// Creates a new command from it's ast
pub fn from_ast(zbuild_file: &ArcStr, cmd: ast::Command<'_>) -> Self {
match cmd {
ast::Command::OnlyArgs(args) => Self {
cwd: None,
args: args.into_iter().map(|arg| Expr::from_ast(zbuild_file, arg)).collect(),
},
ast::Command::Full { cwd, args } => Self {
cwd: cwd.map(|cwd| Expr::from_ast(zbuild_file, cwd)),
args: args.into_iter().map(|arg| Expr::from_ast(zbuild_file, arg)).collect(),
},
pub fn from_ast(cmd: ast::Command) -> Self {
Self {
cwd: cmd.cwd.map(Expr::from_ast),
stdout: cmd.stdout.map(|stdout| stdout.0),
args: cmd.args.0.into_iter().map(Expr::from_ast).collect(),
}
}
}

View File

@ -7,12 +7,11 @@ use {
ast,
util::{self, ArcStr},
},
smallvec::SmallVec,
std::{
collections::BTreeMap,
fmt,
hash::{Hash, Hasher},
mem,
sync::Arc,
},
};
@ -34,7 +33,7 @@ pub enum Target<T> {
rule: T,
/// Patterns
pats: Arc<BTreeMap<ArcStr, T>>,
pats: SmallVec<[(ArcStr, T); 1]>,
},
}
@ -50,16 +49,10 @@ impl<T> Target<T> {
impl Target<Expr> {
/// Creates a new target from it's ast
pub fn from_ast(zbuild_file: &ArcStr, ast: ast::Target<'_>) -> Self {
match ast {
ast::Target::File(file) => Self::File {
file: Expr::from_ast(zbuild_file, file),
is_static: false,
},
ast::Target::Rule { rule } => Self::Rule {
rule: Expr::from_ast(zbuild_file, rule),
pats: Arc::new(BTreeMap::new()),
},
pub fn from_ast(target: ast::Expr) -> Self {
Self::File {
is_static: target.is_static,
file: Expr::from_ast(target),
}
}
}

View File

@ -131,11 +131,6 @@ where
let mut matches = s.match_indices(pat.clone());
// Find all matches, replacing the range as we go.
#[expect(
clippy::string_slice,
reason = "The index will always be valid, as it's the end of the string returned by `match_indices`, which \
must return substrings of the string"
)]
while let Some((pos, part)) = matches.next() {
// Replace the range
mem::drop(matches);

View File

@ -1,18 +1,18 @@
//! Arc string
// Lints
#![expect(unsafe_code, reason = "We need unsafe to implement our string 'cached' pointer")]
// Imports
use std::{
borrow::Borrow,
cmp,
fmt,
hash::{Hash, Hasher},
ops::Deref,
ptr::NonNull,
str::pattern::{Pattern, ReverseSearcher},
sync::Arc,
use {
std::{
borrow::Borrow,
cmp,
fmt,
hash::{Hash, Hasher},
mem,
ops::{Deref, Range},
str::pattern::{Pattern, ReverseSearcher},
sync::Arc,
},
yoke::Yoke,
};
/// Arc string.
@ -25,23 +25,18 @@ use std::{
/// accessible as a `String`.
#[derive(Clone)]
pub struct ArcStr {
/// This string's pointer
///
/// The string must *never* be mutated through this pointer,
/// due to it being possibly derived from a `&str`.
ptr: NonNull<str>,
/// Inner
#[expect(clippy::rc_buffer, reason = "We need it for efficient conversion to/from `String`")]
inner: Arc<String>,
// Note: We need an `Arc<String>` for efficient conversion to/from `String`
inner: Yoke<&'static str, Arc<String>>,
}
impl ArcStr {
/// Returns the offset of this string compared to the base
fn base_offset(&self) -> usize {
// SAFETY: `self.ptr` was derived from `inner.base_ptr`
let start = unsafe { self.ptr.as_ptr().byte_offset_from(self.inner.as_ptr()) };
usize::try_from(start).expect("String pointer was before base pointer")
/// Returns the range of this string compared to the base
fn base_range(&self) -> Range<usize> {
self.inner
.backing_cart()
.substr_range(self)
.expect("String pointer should be within allocation")
}
/// Updates this string as a `&mut String`.
@ -51,36 +46,32 @@ impl ArcStr {
where
F: FnOnce(&mut String) -> R,
{
// Get the offset and length of our specific string
let start = self.base_offset();
let len = self.len();
// Get the range of our specific string
let range = self.base_range();
// Get the inner string
let s = match Arc::get_mut(&mut self.inner) {
let mut inner = mem::take(self).inner.into_backing_cart();
let s = match Arc::get_mut(&mut inner) {
// If we're unique, slice the parts we don't care about and return
Some(s) => {
s.truncate(start + len);
let _ = s.drain(..start);
s.truncate(range.end);
let _ = s.drain(..range.start);
s
},
// Otherwise copy
None => {
self.inner = Arc::new(self.to_string());
Arc::get_mut(&mut self.inner).expect("Should be unique")
inner = Arc::new(inner[range].to_owned());
Arc::get_mut(&mut inner).expect("Should be unique")
},
};
// Since we're invalidating `self.inner`, replace `ptr`
// with a dummy value in case of panics.
self.ptr = NonNull::from("");
// Then mutate
let output = f(s);
// And finally, reconstruct ourselves
self.ptr = NonNull::from(s.as_str());
*self = Self::from(inner);
output
}
@ -90,23 +81,17 @@ impl ArcStr {
/// # Panics
/// `s` must be derived from this string, else this method panics.
pub fn slice_from_str(&self, s: &str) -> Self {
// Get pointer ranges
let self_range = self.as_bytes().as_ptr_range();
let s_range = s.as_bytes().as_ptr_range();
let range = self.substr_range(s).expect("Input was not a substring of this string");
let inner = self.inner.map_project_cloned(|s, _| &s[range]);
Self { inner }
}
assert!(
self_range.contains(&s_range.start) || s_range.start == self_range.end,
"String start was before this string"
);
assert!(
self_range.contains(&s_range.end) || s_range.end == self_range.end,
"String end was past this string"
);
Self {
ptr: NonNull::from(s),
inner: Arc::clone(&self.inner),
}
/// Slices this string
pub fn slice<S>(&self, slice: S) -> Self
where
str: std::ops::Index<S, Output = str>,
{
self.slice_from_str(&self[slice])
}
/// Wrapper for [`str::strip_prefix`]
@ -123,14 +108,6 @@ impl ArcStr {
}
}
// SAFETY: We're a self-referential `(&str, Arc<String>)`,
// which is comprised of `Send + Sync` types.
unsafe impl Send for ArcStr {}
// SAFETY: See above in [`Send`] impl
unsafe impl Sync for ArcStr {}
impl PartialEq for ArcStr {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
@ -176,8 +153,7 @@ impl Deref for ArcStr {
type Target = str;
fn deref(&self) -> &Self::Target {
// SAFETY: `self.ptr` always contains a valid `str`.
unsafe { self.ptr.as_ref() }
self.inner.get()
}
}
@ -189,36 +165,35 @@ impl Borrow<str> for ArcStr {
impl From<String> for ArcStr {
fn from(s: String) -> Self {
Self::from(Arc::new(s))
}
}
impl From<Arc<String>> for ArcStr {
fn from(s: Arc<String>) -> Self {
Self {
ptr: NonNull::from(s.as_str()),
inner: Arc::new(s),
inner: Yoke::attach_to_cart(s, |s| &**s),
}
}
}
impl From<ArcStr> for String {
fn from(s: ArcStr) -> Self {
// Get the offset and length of our specific string
let start = s.base_offset();
let len = s.len();
// Get the range of our specific string
let range = s.base_range();
match Arc::try_unwrap(s.inner) {
let inner = s.inner.into_backing_cart();
match Arc::try_unwrap(inner) {
// If we're unique, slice the parts we don't care about and return
Ok(mut inner) => {
inner.truncate(start + len);
let _ = inner.drain(..start);
inner.truncate(range.end);
let _ = inner.drain(..range.start);
inner
},
// Otherwise copy
Err(inner) => ArcStr { inner, ..s }.to_string(),
Err(inner) => inner[range].to_owned(),
}
}
}
impl From<&str> for ArcStr {
fn from(s: &str) -> Self {
s.to_owned().into()
}
}

View File

@ -8,11 +8,9 @@
// Imports
use {
crate::{build, rules::Target, util::ArcStr, AppError, Builder},
anyhow::Context,
crate::{AppError, Builder, build, rules::Target, util::ArcStr},
dashmap::{DashMap, DashSet},
futures::{stream::FuturesUnordered, StreamExt},
notify::Watcher as _,
futures::{StreamExt, stream::FuturesUnordered},
notify_debouncer_full::Debouncer,
std::{
io,
@ -22,6 +20,7 @@ use {
},
tokio::sync::mpsc,
tokio_stream::wrappers::ReceiverStream,
zutil_app_error::Context,
};
/// A reverse dependency
@ -36,8 +35,8 @@ struct RevDep {
/// Target watcher
pub struct Watcher {
/// Watcher
watcher: Debouncer<notify::RecommendedWatcher, notify_debouncer_full::FileIdMap>,
/// Inner watcher
inner: Debouncer<notify::RecommendedWatcher, notify_debouncer_full::RecommendedCache>,
/// Reverse dependencies
rev_deps: DashMap<PathBuf, RevDep>,
@ -62,23 +61,17 @@ impl Watcher {
for fs_event in fs_events {
tracing::trace!(?fs_event, "Watcher fs event");
#[expect(
let_underscore_drop,
clippy::let_underscore_must_use,
reason = "We don't care if it succeeded or not"
)]
let _: Result<(), _> = fs_event_tx.blocking_send(fs_event);
},
Err(errs) =>
for err in errs {
tracing::warn!(err=?anyhow::Error::from(err), "Error while watching");
for err in &errs {
tracing::warn!(err=?AppError::from(err), "Error while watching");
},
})
.context("Unable to create file watcher")
.map_err(AppError::Other)?;
.context("Unable to create file watcher")?;
Ok(Self {
watcher,
inner: watcher,
rev_deps: DashMap::new(),
fs_event_stream: ReceiverStream::new(fs_event_rx),
builder_event_rx,
@ -124,7 +117,7 @@ impl Watcher {
// TODO: Is this enough? What if the parent directory also gets deleted?
// should we watch directories until the root?
tracing::trace!(?dep_path, "Starting to watch path");
if let Err(err) = self.watcher.watcher().watch(
if let Err(err) = self.inner.watch(
dep_path.parent().unwrap_or(&dep_path),
notify::RecursiveMode::NonRecursive,
) {
@ -243,7 +236,6 @@ impl Watcher {
dep_parents
.iter()
.map(|target| async {
#[expect(clippy::let_underscore_must_use, reason = "We don't care if the build succeeds")]
let _: Result<(), _> = crate::build_target(builder, target, ignore_missing).await;
})
.collect::<FuturesUnordered<_>>()

43
tests/basic_multiple.rs Normal file
View File

@ -0,0 +1,43 @@
// Features
#![feature(must_not_suspend)]
// Lints
#![expect(clippy::tests_outside_test_module, reason = "We're an integration test")]
// Modules
mod util;
// Imports
use {
zbuild::ExitResult,
zutil_app_error::{Context, app_error},
};
/// Single rule with multiple outputs
#[tokio::test]
#[tracing_test::traced_test]
async fn basic_multiple() -> ExitResult {
	// Manifest with one rule that declares (and touches) two outputs.
	let manifest = r#"
rule create_file {
out "file1.out";
out "file2.out";
exec "touch" "file1.out";
exec "touch" "file2.out";
}
"#;
	// Note: Only request `file1.out`
	let temp_dir = util::with_zbuild(manifest, ["file1.out"]).await?;

	// Both outputs must exist, since the rule creates them together.
	for file_out in ["file1.out", "file2.out"].map(|name| temp_dir.path().join(name)) {
		if !file_out.try_exists().context("Unable to check if output file exists")? {
			Err(app_error!("Output file {file_out:?} was missing"))?;
		}
	}

	ExitResult::Ok
}

38
tests/basic_none.rs Normal file
View File

@ -0,0 +1,38 @@
// Features
#![feature(must_not_suspend)]
// Lints
#![expect(clippy::tests_outside_test_module, reason = "We're an integration test")]
// Modules
mod util;
// Imports
use {
zbuild::ExitResult,
zutil_app_error::{Context, app_error},
};
/// Single rule and target
#[tokio::test]
#[tracing_test::traced_test]
async fn basic_none() -> ExitResult {
	// Manifest with a single rule that would create `file.out` if built.
	let manifest = r#"
rule create_file {
out "file.out";
exec "touch" "file.out";
}
"#;
	// Note: We're passing no targets
	let temp_dir = util::with_zbuild(manifest, []).await?;

	// Note: We're making sure it *doesn't* exist, since we didn't want to build it.
	let file_out = temp_dir.path().join("file.out");
	let exists = file_out.try_exists().context("Unable to check if output file exists")?;
	if exists {
		Err(app_error!("Output file {file_out:?} was present"))?;
	}

	ExitResult::Ok
}

36
tests/basic_single.rs Normal file
View File

@ -0,0 +1,36 @@
// Features
#![feature(must_not_suspend)]
// Lints
#![expect(clippy::tests_outside_test_module, reason = "We're an integration test")]
// Modules
mod util;
// Imports
use {
zbuild::ExitResult,
zutil_app_error::{Context, app_error},
};
/// Single rule and target
#[tokio::test]
#[tracing_test::traced_test]
async fn basic_single() -> ExitResult {
	// Manifest with a single rule; we request its single output.
	let manifest = r#"
rule create_file {
out "file.out";
exec "touch" "file.out";
}
"#;
	let temp_dir = util::with_zbuild(manifest, ["file.out"]).await?;

	// The requested output must have been created.
	let file_out = temp_dir.path().join("file.out");
	let exists = file_out.try_exists().context("Unable to check if output file exists")?;
	if !exists {
		Err(app_error!("Output file {file_out:?} was missing"))?;
	}

	ExitResult::Ok
}

54
tests/failure.rs Normal file
View File

@ -0,0 +1,54 @@
// Features
#![feature(must_not_suspend, yeet_expr)]
// Lints
#![expect(clippy::tests_outside_test_module, reason = "We're an integration test")]
// Modules
mod util;
// Imports
use {
std::fs,
tempfile::TempDir,
zbuild::{Args, ExitResult},
zutil_app_error::Context,
};
/// Tests that a failing command aborts the rule: `exec "false"` runs before
/// `exec "touch" "a"`, so zbuild must return an error and must not create `a`.
#[tokio::test]
#[tracing_test::traced_test]
async fn failure() -> ExitResult {
let temp_dir = TempDir::with_prefix("zbuild").context("Unable to create temporary directory")?;
let zbuild_zb = temp_dir.path().join("zbuild.zb");
// Note: the `touch "a"` is listed *after* the failing `false`, so it
// should never run.
fs::write(
&zbuild_zb,
r#"
rule a {
out "a";
exec "false";
exec "touch" "a";
}
"#,
)
.context("Unable to write zbuild manifest")?;
let args = Args {
targets: ["a".to_owned()].into(),
zbuild_path: Some(zbuild_zb),
..Args::default()
};
tracing::info!(?args, "Arguments");
let res = zbuild::run(args).await;
// The build as a whole must have failed due to `false` exiting non-zero.
zutil_app_error::ensure!(res.is_err(), "Expected zbuild error");
let a = temp_dir.path().join("a");
// And the output must not exist, since the rule failed before touching it.
zutil_app_error::ensure!(
!a.try_exists().context("Unable to check if output file exists")?,
"Output file {a:?} was created when it shouldn't've"
);
ExitResult::Ok
}

128
tests/keep_going.rs Normal file
View File

@ -0,0 +1,128 @@
// Features
#![feature(must_not_suspend, yeet_expr)]
// Lints
#![expect(clippy::tests_outside_test_module, reason = "We're an integration test")]
// Modules
mod util;
// Imports
use {
std::fs,
tempfile::TempDir,
zbuild::{AppError, Args, ExitResult},
zutil_app_error::Context,
};
/// Test for `--keep-going`
#[tokio::test]
#[tracing_test::traced_test]
async fn keep_going() -> ExitResult {
	// Exercise both modes: with the flag enabled, then disabled.
	for keep_going in [true, false] {
		let ctx = match keep_going {
			true => "Unable to test with `--keep-going`",
			false => "Unable to test without `--keep-going`",
		};
		self::inner(keep_going).await.context(ctx)?;
	}
	ExitResult::Ok
}
/// Inner function to test
///
/// This works by having the following tree:
///
/// ```no_compile
/// A -> B
/// \-> C1 -> C2
/// ```
///
/// Where `B` is always going to fail, after 200ms, to allow all other targets
/// to start running.
///
/// We make `C2` take a long time, to ensure `B` is executed (and fails)
/// before it can return.
///
/// When testing with `keep_going = true`, we ensure that `C1` is still built,
/// despite `C2` only finishing *after* `B` errors out.
///
/// When testing with `keep_going = false`, we ensure that `C1` is not built,
/// since `C2` exits after `B` errors, so nothing else should be built.
async fn inner(keep_going: bool) -> Result<(), AppError> {
let temp_dir = TempDir::with_prefix("zbuild").context("Unable to create temporary directory")?;
let zbuild_zb = temp_dir.path().join("zbuild.zb");
// TODO: Instead of sleeping, use `inotify` to wait for other
// actions to happen?
// NOTE(review): the sleeps below are timing-based ordering (`b` fails at
// ~100ms, `c2` finishes at ~200ms) and could flake on a very slow machine.
fs::write(
&zbuild_zb,
r#"
rule a {
out "a";
dep "b";
dep "c1";
exec "touch" "a";
}
rule b {
out "b";
exec "sleep" "0.1";
exec "false";
exec "touch" "b";
}
rule c1 {
out "c1";
dep "c2";
exec "touch" "c1";
}
rule c2 {
out "c2";
exec "sleep" "0.2";
exec "touch" "c2";
}
"#,
)
.context("Unable to write zbuild manifest")?;
let args = Args {
targets: ["a".to_owned()].into(),
zbuild_path: Some(zbuild_zb),
keep_going,
..Args::default()
};
tracing::info!(?args, "Arguments");
let res = zbuild::run(args).await;
// `b` always fails, so the build as a whole must fail in either mode.
zutil_app_error::ensure!(res.is_err(), "Expected zbuild error");
// Collect all output paths the manifest could have produced.
let a = temp_dir.path().join("a");
let b = temp_dir.path().join("b");
let c1 = temp_dir.path().join("c1");
let c2 = temp_dir.path().join("c2");
// `a` depends on `b`, which failed, so it must never be built.
zutil_app_error::ensure!(
!a.try_exists().context("Unable to check if output file exists")?,
"Output file {a:?} was created"
);
// `b`'s `touch` comes after the failing `false`, so it must not exist.
zutil_app_error::ensure!(
!b.try_exists().context("Unable to check if output file exists")?,
"Output file {b:?} was created"
);
// `c1` is only built after `c2`, which finishes after `b` fails:
// with `--keep-going` it should still be built, without it it shouldn't.
match keep_going {
true => zutil_app_error::ensure!(
c1.try_exists().context("Unable to check if output file exists")?,
"Output file {c1:?} was missing"
),
false => zutil_app_error::ensure!(
!c1.try_exists().context("Unable to check if output file exists")?,
"Output file {c1:?} was created"
),
}
// `c2` was already in-flight when `b` failed, so it completes either way.
zutil_app_error::ensure!(
c2.try_exists().context("Unable to check if output file exists")?,
"Output file {c2:?} was missing"
);
Ok(())
}

36
tests/util/mod.rs Normal file
View File

@ -0,0 +1,36 @@
//! Utilities for all integration tests
// Lints
#![allow(
dead_code,
reason = "This module is used from many tests, which might not use everything"
)]
// Imports
use {
std::fs,
tempfile::TempDir,
zbuild::{AppError, Args},
zutil_app_error::Context,
};
/// Creates a directory with a zbuild manifest, then runs it, and returns the directory
pub async fn with_zbuild<'a, T>(zbuild_manifest: &str, targets: T) -> Result<TempDir, AppError>
where
	T: AsRef<[&'a str]>,
{
	// Set up a fresh temporary directory holding the manifest.
	let temp_dir = TempDir::with_prefix("zbuild").context("Unable to create temporary directory")?;
	let zbuild_zb = temp_dir.path().join("zbuild.zb");
	fs::write(&zbuild_zb, zbuild_manifest).context("Unable to write zbuild manifest")?;

	// Build the requested targets against it.
	let targets = targets.as_ref().iter().map(|&target| target.to_owned()).collect();
	let args = Args {
		targets,
		zbuild_path: Some(zbuild_zb),
		..Args::default()
	};
	tracing::info!(?args, "Arguments");
	zbuild::run(args).await.context("Unable to run zbuild")?;

	Ok(temp_dir)
}

View File

@ -0,0 +1,22 @@
{
"comments": {
"lineComment": "#"
},
"brackets": [
["(", ")"],
["[", "]"],
["{", "}"]
],
"autoClosingPairs": [
["{", "}"],
["[", "]"],
["(", ")"],
["\"", "\""]
],
"surroundingPairs": [
["{", "}"],
["[", "]"],
["(", ")"],
["\"", "\""]
]
}

View File

@ -0,0 +1,39 @@
{
"name": "zbuild",
"displayName": "Zbuild",
"description": "Syntax highlighting for zbuild files",
"version": "0.1.0",
"publisher": "zenithsiz",
"engines": {
"vscode": "^1.96.4"
},
"repository": {
"type": "git",
"url": "https://github.com/zenithsiz/zbuild"
},
"categories": [
"Programming Languages"
],
"contributes": {
"languages": [
{
"id": "zbuild",
"aliases": [
"Zbuild",
"zbuild"
],
"extensions": [
".zb"
],
"configuration": "./language-configuration.json"
}
],
"grammars": [
{
"language": "zbuild",
"scopeName": "source.zbuild",
"path": "./syntaxes/zbuild.tmLanguage.json"
}
]
}
}

View File

@ -0,0 +1,49 @@
{
"$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json",
"name": "zbuild",
"patterns": [
{ "include": "#keywords" },
{ "include": "#strings" },
{ "include": "#comment" }
],
"repository": {
"keywords": {
"patterns": [
{
"name": "keyword.control.zbuild",
"match": "="
},
{
"name": "keyword.other.zbuild",
"match": "\\b(alias|args|cwd|default|deps_file|dep|exec|include|non_empty|opt|out|pat|rule|static|stdout)\\b"
},
{
"name": "variable.name",
"match": "\\p{XID_Start}\\p{XID_Continue}*"
}
]
},
"strings": {
"name": "string.quoted.double.zbuild",
"begin": "\"",
"end": "\""
},
"comment": {
"patterns": [
{ "include": "#comment-block" },
{ "include": "#comment-line" }
]
},
"comment-block": {
"name": "comment.block.zbuild",
"begin": "###",
"end": "###"
},
"comment-line": {
"name": "comment.line.zbuild",
"begin": "#",
"end": "$"
}
},
"scopeName": "source.zbuild"
}