Compare commits

...

4 Commits

Author SHA1 Message Date
168a0032aa
Removed legacy crate. 2025-09-23 19:30:09 +01:00
b400c92054
Updated all dependencies. 2025-09-23 19:29:29 +01:00
d1f3ffeac7
Formatted all toml files. 2025-09-23 19:28:14 +01:00
796291e03c
Removed zutil-app-error. 2025-09-23 19:27:32 +01:00
47 changed files with 101 additions and 3093 deletions

13
.taplo.toml Normal file
View File

@ -0,0 +1,13 @@
[formatting]
indent_string = "\t"
align_entries = true
column_width = 120
[[rule]]
include = ["**/Cargo.toml"]
keys = ["dependencies", "workspace"]
[rule.formatting]
reorder_keys = true
reorder_arrays = true
reorder_inline_tables = true

View File

@ -1,46 +1,38 @@
[workspace]
members = [
"zutil-app-error",
"zutil-async-loadable",
"zutil-cloned",
"zutil-legacy",
]
members = ["zutil-async-loadable", "zutil-cloned"]
resolver = "2"
[workspace.dependencies]
# Workspace members
zutil-app-error = { path = "zutil-app-error" }
zutil-async-loadable = { path = "zutil-async-loadable" }
zutil-cloned = { path = "zutil-cloned" }
zutil-legacy = { path = "zutil-legacy" }
zutil-cloned = { path = "zutil-cloned" }
arrayref = "0.3.9"
ascii = "1.1.0"
derive_more = "1.0.0"
eframe = "0.29.1"
either = "1.13.0"
futures = "0.3.31"
int-conv = "0.1.4"
itertools = "0.14.0"
log = "0.4.22"
mappable-rc = "0.1.1"
native-dialog = "0.7.0"
parking_lot = "0.12.3"
pin-project = "1.1.7"
quote = "1.0.37"
ref-cast = "1.0.23"
sealed = "0.6.0"
serde = "1.0.216"
app-error = { git = "https://github.com/Zenithsiz/app-error", rev = "30238f3778fe84809ba2113c1199852b7bc7c1e9" }
arrayref = "0.3.9"
ascii = "1.1.0"
derive_more = "2.0.1"
eframe = "0.32.3"
either = "1.15.0"
futures = "0.3.31"
int-conv = "0.1.4"
itertools = "0.14.0"
log = "0.4.28"
mappable-rc = "0.1.1"
native-dialog = "0.9.0"
parking_lot = "0.12.4"
pin-project = "1.1.10"
quote = "1.0.40"
ref-cast = "1.0.24"
sealed = "0.6.0"
serde = "1.0.226"
stable_deref_trait = "1.2.0"
syn = "2.0.90"
thiserror = "2.0.7"
tokio = "1.42.0"
yoke = "0.7.5"
app-error = { git = "https://github.com/Zenithsiz/app-error", rev = "30238f3778fe84809ba2113c1199852b7bc7c1e9" }
syn = "2.0.106"
thiserror = "2.0.16"
tokio = "1.47.1"
yoke = "0.8.0"
[workspace.lints]

View File

@ -1,56 +1,56 @@
# We're fine with unstable features
unstable_features = true
binop_separator = "Back"
blank_lines_lower_bound = 0
blank_lines_upper_bound = 2
brace_style = "SameLineWhere"
combine_control_expr = true
condense_wildcard_suffixes = true
control_brace_style = "AlwaysSameLine"
empty_item_single_line = true
binop_separator = "Back"
blank_lines_lower_bound = 0
blank_lines_upper_bound = 2
brace_style = "SameLineWhere"
combine_control_expr = true
condense_wildcard_suffixes = true
control_brace_style = "AlwaysSameLine"
empty_item_single_line = true
enum_discrim_align_threshold = 100
error_on_line_overflow = false
error_on_unformatted = false
fn_params_layout = "Tall"
fn_single_line = false
force_explicit_abi = true
force_multiline_blocks = false
format_code_in_doc_comments = false
format_macro_bodies = true
format_macro_matchers = true
format_strings = true
group_imports = "Preserve"
hard_tabs = true
hex_literal_case = "Lower"
imports_granularity = "One"
imports_indent = "Block"
imports_layout = "HorizontalVertical"
indent_style = "Block"
inline_attribute_width = 0
match_arm_blocks = false
match_arm_leading_pipes = "Never"
match_block_trailing_comma = true
max_width = 120
merge_derives = false
newline_style = "Unix"
normalize_comments = false
normalize_doc_attributes = false
overflow_delimited_expr = true
remove_nested_parens = true
reorder_impl_items = true
reorder_imports = true
reorder_modules = true
space_after_colon = true
space_before_colon = false
spaces_around_ranges = false
error_on_line_overflow = false
error_on_unformatted = false
fn_params_layout = "Tall"
fn_single_line = false
force_explicit_abi = true
force_multiline_blocks = false
format_code_in_doc_comments = false
format_macro_bodies = true
format_macro_matchers = true
format_strings = true
group_imports = "Preserve"
hard_tabs = true
hex_literal_case = "Lower"
imports_granularity = "One"
imports_indent = "Block"
imports_layout = "HorizontalVertical"
indent_style = "Block"
inline_attribute_width = 0
match_arm_blocks = false
match_arm_leading_pipes = "Never"
match_block_trailing_comma = true
max_width = 120
merge_derives = false
newline_style = "Unix"
normalize_comments = false
normalize_doc_attributes = false
overflow_delimited_expr = true
remove_nested_parens = true
reorder_impl_items = true
reorder_imports = true
reorder_modules = true
space_after_colon = true
space_before_colon = false
spaces_around_ranges = false
struct_field_align_threshold = 20
struct_lit_single_line = true
trailing_comma = "Vertical"
trailing_semicolon = true
type_punctuation_density = "Wide"
use_field_init_shorthand = true
use_small_heuristics = "Default"
use_try_shorthand = true
where_single_line = false
wrap_comments = false
struct_lit_single_line = true
trailing_comma = "Vertical"
trailing_semicolon = true
type_punctuation_density = "Wide"
use_field_init_shorthand = true
use_small_heuristics = "Default"
use_try_shorthand = true
where_single_line = false
wrap_comments = false

View File

@ -1,12 +0,0 @@
[package]
name = "zutil-app-error"
version = "0.1.0"
edition = "2024"
[dependencies]
itertools = { workspace = true }
app-error = { workspace = true }
[lints]
workspace = true

View File

@ -1,6 +0,0 @@
//! App Error
//!
//! This crate is currently just a reexport of [`app_error`],
//! see that crate for documentation
pub use app_error::*;

View File

@ -1,24 +1,22 @@
[package]
name = "zutil-async-loadable"
name = "zutil-async-loadable"
version = "0.1.0"
edition = "2024"
[dependencies]
# Workspace
zutil-app-error = { workspace = true }
mappable-rc = { workspace = true }
parking_lot = { workspace = true, features = ["send_guard"] }
pin-project = { workspace = true }
app-error = { workspace = true }
mappable-rc = { workspace = true }
parking_lot = { features = ["send_guard"], workspace = true }
pin-project = { workspace = true }
stable_deref_trait = { workspace = true }
tokio = { workspace = true, features = ["sync", "rt"] }
yoke = { workspace = true, features = ["derive"] }
tokio = { features = ["rt", "sync"], workspace = true }
yoke = { features = ["derive"], workspace = true }
[dev-dependencies]
futures = { workspace = true }
tokio = { workspace = true, features = ["macros"] }
tokio = { workspace = true, features = ["macros"] }
[lints]

View File

@ -24,10 +24,10 @@ pub use self::{
// Imports
use {
self::res_arc_guard::ResArcGuard,
app_error::AppError,
parking_lot::Mutex,
std::{self, error::Error, fmt, ops::AsyncFnOnce, sync::Arc},
tokio::{sync::Notify, task},
zutil_app_error::AppError,
};
/// Inner

View File

@ -3,13 +3,13 @@
// Imports
use {
crate::res_arc_guard::ResArcGuard,
app_error::{AppError, app_error},
std::{
future::{Future, IntoFuture},
pin::Pin,
task::Poll,
},
tokio::task,
zutil_app_error::{AppError, app_error},
};
/// Load handle inner

View File

@ -5,7 +5,7 @@ use {
futures::FutureExt,
std::{pin::pin, sync::Arc},
tokio::sync::Mutex,
zutil_app_error::AppError,
app_error::AppError,
zutil_async_loadable::AsyncLoadable,
};

View File

@ -1,5 +1,5 @@
[package]
name = "zutil-cloned"
name = "zutil-cloned"
version = "0.1.0"
edition = "2021"
@ -9,7 +9,7 @@ proc-macro = true
[dependencies]
quote = { workspace = true }
syn = { workspace = true, features = ["full"] }
syn = { features = ["full"], workspace = true }
[lints]
workspace = true

View File

@ -1,30 +0,0 @@
[package]
authors = ["Filipe Rodrigues <filipejacintorodrigues1@gmail.com>"]
edition = "2018"
name = "zutil-legacy"
version = "0.1.0"
[dependencies]
arrayref = { workspace = true }
ascii = { workspace = true }
derive_more = { workspace = true, features = ["full"] }
eframe = { workspace = true, optional = true }
either = { workspace = true }
futures = { workspace = true, optional = true }
int-conv = { workspace = true }
log = { workspace = true }
native-dialog = { workspace = true, optional = true }
ref-cast = { workspace = true }
sealed = { workspace = true }
serde = { workspace = true, features = ["derive"], optional = true }
thiserror = { workspace = true }
[features]
alert = ["native-dialog"]
gui = ["eframe"]
use_futures = ["futures"]
use_serde = ["serde", "ascii/serde"]
[lints]
workspace = true

View File

@ -1,82 +0,0 @@
//! Alerts
use std::{borrow::Cow, fmt};
// Imports
use native_dialog::{MessageDialog, MessageType};
/// Alerts a message
///
/// Displays `msg` in a blocking OS message dialog of kind `ty`.
///
/// # Panics
/// Panics if the dialog cannot be shown.
fn alert(ty: MessageType, msg: fmt::Arguments) {
	// Get the string to display without allocating if possible
	// Note: `fmt::Arguments::as_str` only returns `Some` for literal-only
	// format strings, so interpolated messages take the `Owned` path.
	let msg = msg.as_str().map_or_else(|| Cow::Owned(msg.to_string()), Cow::Borrowed);

	MessageDialog::new()
		.set_text(&*msg)
		.set_type(ty)
		.show_alert()
		.expect("Unable to alert user");
}
/// Confirms a message
///
/// Displays `msg` in a blocking OS confirmation dialog of kind `ty`
/// and returns the user's choice.
///
/// # Panics
/// Panics if the dialog cannot be shown.
fn confirm(ty: MessageType, msg: fmt::Arguments) -> bool {
	// Get the string to display without allocating if possible
	let msg = msg.as_str().map_or_else(|| Cow::Owned(msg.to_string()), Cow::Borrowed);

	MessageDialog::new()
		.set_text(&*msg)
		.set_type(ty)
		.show_confirm()
		.expect("Unable to confirm user")
}
/// Alerts an error
pub fn error(msg: fmt::Arguments) {
	self::alert(MessageType::Error, msg);
}

/// Alerts an error with interpolation
pub macro error($($args:tt)*) {
	$crate::alert::error(::std::format_args!($($args)*))
}

/// Alerts a warning
pub fn warn(msg: fmt::Arguments) {
	self::alert(MessageType::Warning, msg);
}

/// Alerts a warning with interpolation
pub macro warn($($args:tt)*) {
	$crate::alert::warn(::std::format_args!($($args)*))
}

/// Alerts info
pub fn info(msg: fmt::Arguments) {
	self::alert(MessageType::Info, msg);
}

/// Alerts info with interpolation
pub macro info($($args:tt)*) {
	$crate::alert::info(::std::format_args!($($args)*))
}

/// Alerts and requests a confirmation for a warning
#[must_use]
pub fn warn_confirm(msg: fmt::Arguments) -> bool {
	self::confirm(MessageType::Warning, msg)
}

/// Alerts and requests a confirmation for a warning with interpolation
pub macro warn_confirm($($args:tt)*) {
	$crate::alert::warn_confirm(::std::format_args!($($args)*))
}

/// Alerts and requests a confirmation for info
#[must_use]
pub fn info_confirm(msg: fmt::Arguments) -> bool {
	self::confirm(MessageType::Info, msg)
}

/// Alerts and requests a confirmation for info with interpolation
pub macro info_confirm($($args:tt)*) {
	$crate::alert::info_confirm(::std::format_args!($($args)*))
}

View File

@ -1,60 +0,0 @@
//! Alphabet strings
/// Modules
pub mod arr;
mod error;
pub mod owned;
pub mod slice;
// Exports
pub use {arr::StrArrAlphabet, error::InvalidCharError, owned::StringAlphabet, slice::StrAlphabet};
/// A string alphabet
///
/// This trait is implemented by marker types that validate bytes as
/// part of their alphabet.
///
/// This is accomplished by the [`validate`](Alphabet::validate) method,
/// which simply checks if a byte slice is valid for this alphabet.
pub trait Alphabet {
	/// Error type
	type Error: std::error::Error + 'static;

	/// Validates `bytes` for a string of this alphabet and returns
	/// it, possibly without its terminator.
	fn validate(bytes: &[u8]) -> Result<&[u8], Self::Error>;
}

/// Implements the [`Alphabet`] trait from a list of valid characters
/// and a possible terminator
pub trait OnlyValidCharsAlphabet {
	/// All valid characters
	fn valid_chars() -> &'static [u8];

	/// Terminator for the string.
	fn terminator() -> u8;
}
impl<A: OnlyValidCharsAlphabet> Alphabet for A {
	type Error = InvalidCharError;

	fn validate(bytes: &[u8]) -> Result<&[u8], Self::Error> {
		let terminator = Self::terminator();

		// Scan byte-by-byte until the terminator (exclusive) or the end,
		// rejecting the first byte outside the alphabet.
		for (pos, &byte) in bytes.iter().enumerate() {
			match byte {
				// Terminator: return everything before it
				b if b == terminator => return Ok(&bytes[..pos]),
				// Outside the alphabet: reject
				b if !Self::valid_chars().contains(&b) => return Err(InvalidCharError { byte: b, pos }),
				// Valid character: keep going
				_ => (),
			}
		}

		// Unterminated strings are still valid in full.
		Ok(bytes)
	}
}

View File

@ -1,83 +0,0 @@
//! String array
// Imports
use {
super::{Alphabet, StrAlphabet},
std::{fmt, marker::PhantomData, ops::Deref},
};
/// An alphabetic string array
///
/// A fixed-capacity (`N`-byte) string whose contents have been
/// validated by the alphabet `A`.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct StrArrAlphabet<A: Alphabet, const N: usize> {
	/// Phantom
	phantom: PhantomData<A>,

	/// Bytes
	// Note: Only `bytes[..len]` is meaningful; the tail is zero-filled
	// by `from_bytes`.
	bytes: [u8; N],

	/// Length
	len: usize,
}

impl<A: Alphabet, const N: usize> StrArrAlphabet<A, N> {
	/// Parses a string from bytes
	///
	/// Validates `bytes` against `A`, then copies the validated prefix
	/// into a fresh zero-filled array.
	///
	/// # Errors
	/// Returns an error if validation fails, or if the validated string
	/// doesn't fit in `N` bytes.
	#[allow(clippy::shadow_unrelated)] // They're actually related
	pub fn from_bytes(bytes: &[u8; N]) -> Result<Self, FromBytesError<A::Error>> {
		// Validate the bytes with the alphabet
		let valid_bytes = A::validate(bytes).map_err(FromBytesError::Validate)?;

		// Try to copy the bytes over
		let len = valid_bytes.len();
		let mut bytes = [0; N];
		bytes
			.get_mut(..len)
			.ok_or(FromBytesError::TooLong)?
			.copy_from_slice(valid_bytes);

		Ok(Self {
			phantom: PhantomData,
			bytes,
			len,
		})
	}

	/// Returns this string as a byte array, ignoring length
	// NOTE(review): this exposes the padding past `len` — callers must
	// not treat the tail as string content.
	#[must_use]
	pub fn as_bytes_arr(&self) -> &[u8; N] {
		&self.bytes
	}
}

impl<A: Alphabet, const N: usize> Deref for StrArrAlphabet<A, N> {
	type Target = StrAlphabet<A>;

	fn deref(&self) -> &Self::Target {
		// `StrAlphabet` is `#[repr(transparent)]` over `[u8]`, so we may
		// re-cast the initialized prefix of our buffer.
		ref_cast::RefCast::ref_cast(&self.bytes.as_slice()[..self.len])
	}
}

impl<A: Alphabet, const N: usize> fmt::Debug for StrArrAlphabet<A, N> {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		// Defer to the slice type's impl
		let s: &StrAlphabet<A> = self;
		write!(f, "{s:?}")
	}
}

impl<A: Alphabet, const N: usize> fmt::Display for StrArrAlphabet<A, N> {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		// Defer to the slice type's impl
		let s: &StrAlphabet<A> = self;
		write!(f, "{s}")
	}
}

/// Error type for [`StrArrAlphabet::from_bytes`]
#[derive(Debug, thiserror::Error)]
pub enum FromBytesError<E: std::error::Error> {
	/// Unable to validate
	#[error("Unable to validate")]
	Validate(E),

	/// Returned string was too long
	#[error("Validated string was too long")]
	TooLong,
}

View File

@ -1,13 +0,0 @@
//! Errors
/// Error for the blanket [`Alphabet::validate`](super::Alphabet::validate)
/// impl of [`OnlyValidCharsAlphabet`](super::OnlyValidCharsAlphabet) types
// NOTE(review): the previous doc referenced `AlphabetA` / `AlphabetD`,
// which don't exist in this module — presumably stale names; confirm.
#[derive(Debug, thiserror::Error)]
#[error("Invalid character '{byte:#x}' at index {pos}")]
pub struct InvalidCharError {
	/// Invalid character
	pub byte: u8,

	/// Position
	pub pos: usize,
}

View File

@ -1,49 +0,0 @@
//! String
// Imports
use {
super::{Alphabet, StrAlphabet},
std::{fmt, marker::PhantomData, ops::Deref},
};
/// An alphabetic owned string
///
/// An owned, heap-allocated string whose contents have been validated
/// by the alphabet `A`.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct StringAlphabet<A: Alphabet> {
	/// Phantom
	phantom: PhantomData<A>,

	/// Bytes
	bytes: Vec<u8>,
}

impl<A: Alphabet> StringAlphabet<A> {
	/// Parses a string from bytes
	///
	/// # Errors
	/// Returns an error if `bytes` aren't valid for the alphabet `A`.
	pub fn from_bytes(bytes: &[u8]) -> Result<Self, A::Error> {
		// Note: Only the validated prefix returned by `validate` is stored.
		A::validate(bytes).map(|bytes| Self {
			phantom: PhantomData,
			bytes: bytes.to_vec(),
		})
	}
}

impl<A: Alphabet> Deref for StringAlphabet<A> {
	type Target = StrAlphabet<A>;

	fn deref(&self) -> &Self::Target {
		// `StrAlphabet` is `#[repr(transparent)]` over `[u8]`
		ref_cast::RefCast::ref_cast(self.bytes.as_slice())
	}
}

impl<A: Alphabet> fmt::Debug for StringAlphabet<A> {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		// Defer to the slice type's impl
		let s: &StrAlphabet<A> = self;
		write!(f, "{s:?}")
	}
}

impl<A: Alphabet> fmt::Display for StringAlphabet<A> {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		// Defer to the slice type's impl
		let s: &StrAlphabet<A> = self;
		write!(f, "{s}")
	}
}

View File

@ -1,85 +0,0 @@
//! String slice
// Imports
use {
super::Alphabet,
ref_cast::RefCast,
std::{borrow::Cow, fmt, marker::PhantomData},
};
/// An alphabetic string slice
///
/// A dynamically-sized, borrowed view over bytes validated by the
/// alphabet `A`.
// TODO: Not expose `ref_cast` to the outside, as it breaks
// this string's encapsulation.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[derive(ref_cast::RefCast)]
#[repr(transparent)]
pub struct StrAlphabet<A: Alphabet> {
	/// Phantom
	phantom: PhantomData<A>,

	/// Bytes
	bytes: [u8],
}

impl<A: Alphabet> StrAlphabet<A> {
	/// Returns the bytes from this string
	#[must_use]
	pub fn as_bytes(&self) -> &[u8] {
		&self.bytes
	}

	/// Parses a string from bytes
	///
	/// # Errors
	/// Returns an error if `bytes` aren't valid for the alphabet `A`.
	pub fn from_bytes(bytes: &[u8]) -> Result<&Self, A::Error> {
		A::validate(bytes).map(Self::ref_cast)
	}

	/// Writes this string to bytes
	///
	/// # Panics
	/// Panics if `self` and `bytes` are different lengths
	pub fn write_bytes(&self, bytes: &mut [u8]) {
		bytes.copy_from_slice(self.as_bytes());
	}

	/// Returns this string as a lossy `str`
	///
	/// Non-utf8 sequences are replaced (see [`String::from_utf8_lossy`]).
	#[must_use]
	pub fn as_lossy_str(&self) -> Cow<'_, str> {
		String::from_utf8_lossy(self.as_bytes())
	}

	/// Returns the length of this string
	#[must_use]
	pub fn len(&self) -> usize {
		self.as_bytes().len()
	}

	/// Returns if this string is empty
	#[must_use]
	pub fn is_empty(&self) -> bool {
		self.len() == 0
	}
}

impl<A: Alphabet> PartialEq<[u8]> for StrAlphabet<A> {
	fn eq(&self, other: &[u8]) -> bool {
		self.bytes.eq(other)
	}
}

impl<A: Alphabet, const N: usize> PartialEq<[u8; N]> for StrAlphabet<A> {
	fn eq(&self, other: &[u8; N]) -> bool {
		self.bytes.eq(other)
	}
}

impl<A: Alphabet> fmt::Debug for StrAlphabet<A> {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		write!(f, "{:?}", self.as_lossy_str())
	}
}

impl<A: Alphabet> fmt::Display for StrAlphabet<A> {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		write!(f, "{}", self.as_lossy_str())
	}
}

View File

@ -1,119 +0,0 @@
//! Array splitters
// Export `arrayref` to use in macros
#[doc(hidden)]
pub use ::arrayref;
/// Splits an array into various members
///
/// Expands to an instance of an anonymous `Fields` struct with one field
/// per named region, each borrowing into `$arr` via
/// `arrayref::array_refs!`. A `[size]` region becomes a `&[T; size]`
/// field; a literal-sized region becomes a `&T` to that region's first
/// element.
#[macro_export]
macro_rules! array_split {
	(
		$arr:expr,
		$(
			$name:ident :
			$( [$arr_size:expr] )?
			$( $val_size:literal )?
		),* $(,)?
	) => {{
		// Struct holding all fields
		#[derive(Clone, Copy, Debug)]
		struct Fields<'a, T> {
			$(
				$name:
				$( &'a [T; $arr_size], )?
				// Note: The `cfg`'d-out `__field` only serves to mark which
				// expansion arm was taken; it never actually appears.
				$( &'a T, #[cfg(invalid)] __field: [u8; $val_size], )?
			)*
			__phantom: ::std::marker::PhantomData<&'a T>,
		}

		// Get everything from `array_refs`
		#[allow(
			clippy::used_underscore_binding,
			clippy::ptr_offset_with_cast,
			clippy::indexing_slicing,
		)]
		let (
			$(
				$name
			),*
		) = $crate::array_split::arrayref::array_refs!(
			$arr,
			$(
				$( $arr_size )?
				$( $val_size )?
			),*
		);

		// And return the fields
		Fields {
			$(
				$name
				// Note: `$val_size - $val_size` is always index 0; this turns
				// the region reference into a reference to its first element.
				$( : &( $name[$val_size - $val_size] ) )?
				,
			)*
			__phantom: ::std::marker::PhantomData,
		}
	}}
}
/// Splits an array into various members mutably
///
/// Mutable counterpart of [`array_split!`]: fields are `&'a mut` and the
/// regions come from `arrayref::mut_array_refs!`.
#[allow(clippy::module_name_repetitions)] // `_mut` version should be in the same module
#[macro_export]
macro_rules! array_split_mut {
	(
		$arr:expr,
		$(
			$name:ident :
			$( [$arr_size:expr] )?
			$( $val_size:literal )?
		),* $(,)?
	) => {{
		// Struct holding all fields
		#[derive(Debug)]
		struct Fields<'a, T> {
			$(
				$name:
				$( &'a mut [T; $arr_size], )?
				// Note: This `cfg` is simply done so that `__field` never appears.
				// The `__field` serves to identify when this part should be written.
				$( &'a mut T, #[cfg(invalid)] __field: [u8; $val_size], )?
			)*
			__phantom: ::std::marker::PhantomData<&'a mut T>,
		}

		// Get everything from `mut_array_refs`
		#[allow(
			clippy::used_underscore_binding,
			clippy::ptr_offset_with_cast,
			clippy::indexing_slicing,
		)]
		let (
			$(
				$name
			),*
		) = $crate::array_split::arrayref::mut_array_refs!(
			$arr,
			$(
				$( $arr_size )?
				$( $val_size )?
			),*
		);

		// And return the fields
		Fields {
			$(
				$name
				// Note: This serves to turn a `&mut [u8; 1]` into a `&mut u8`.
				$( : &mut ( $name[$val_size - $val_size] ) )?
				,
			)*
			__phantom: ::std::marker::PhantomData,
		}
	}}
}

View File

@ -1,458 +0,0 @@
//! Ascii string backed by an array
// Modules
mod error;
#[cfg(feature = "use_serde")]
mod visitor;
// Exports
pub use {
ascii::AsciiChar,
error::{FromBytesError, FromUtf8Error, NotAsciiError, TooLongError},
};
// Imports
#[cfg(feature = "use_serde")]
use visitor::DeserializerVisitor;
use {
ascii::AsciiStr,
std::{
cmp::Ordering,
convert::TryFrom,
fmt,
hash::Hash,
ops::{self, Range},
slice::SliceIndex,
},
};
/// An ascii string backed by an array
///
/// A fixed-capacity (`N`-character) ascii string.
#[derive(Clone, Copy)]
pub struct AsciiStrArr<const N: usize> {
	/// Characters
	// Note: Only `chars[..len]` is string content; the tail is
	// unused storage.
	chars: [AsciiChar; N],

	/// Size
	// Invariant: `self.len <= N`
	len: usize,
}

// Constructors
impl<const N: usize> AsciiStrArr<N> {
	/// Creates a new empty string
	#[must_use]
	pub const fn new() -> Self {
		Self {
			chars: [AsciiChar::Null; N],
			len: 0,
		}
	}
}

/// String lengths
impl<const N: usize> AsciiStrArr<N> {
	/// The capacity of the string
	pub const CAPACITY: usize = N;

	/// Returns the length of this string
	#[must_use]
	pub const fn len(&self) -> usize {
		self.len
	}

	/// Returns the capacity of the string, `N`
	#[must_use]
	pub const fn capacity() -> usize {
		Self::CAPACITY
	}

	/// Returns if this string is empty
	#[must_use]
	pub const fn is_empty(&self) -> bool {
		self.len() == 0
	}

	/// Trims the end of the string from 'ch'
	// Note: Only shrinks `len`; the trimmed characters stay in the buffer.
	pub fn trim_end(&mut self, ch: AsciiChar) {
		while !self.is_empty() && self.as_ascii().last() == Some(ch) {
			self.len -= 1;
		}
	}

	/// Returns a string, trimmed of `ch` on the end
	#[must_use]
	pub fn trimmed_end(mut self, ch: AsciiChar) -> Self {
		self.trim_end(ch);
		self
	}
}
/// Conversions to other string types
impl<const N: usize> AsciiStrArr<N> {
	/// Converts this string to a `&AsciiStr`
	#[must_use]
	pub fn as_ascii(&self) -> &AsciiStr {
		// Get all the initialized elements
		// Note: `self.len <= N`, so this cannot panic.
		let chars = self.chars.get(..self.len).expect("Length was larger than `N`");
		<&AsciiStr>::from(chars)
	}

	/// Converts this string to a `&mut AsciiStr`
	#[must_use]
	pub fn as_ascii_mut(&mut self) -> &mut AsciiStr {
		// Get all the initialized elements
		// Note: `self.len <= N`, so this cannot panic.
		let chars = self.chars.get_mut(..self.len).expect("Length was larger than `N`");
		<&mut AsciiStr>::from(chars)
	}

	/// Converts this string to a `&[AsciiChar]`
	#[must_use]
	pub fn as_ascii_slice(&self) -> &[AsciiChar] {
		self.as_ascii().as_slice()
	}

	/// Converts this string to a `&mut [AsciiChar]`
	#[must_use]
	pub fn as_ascii_slice_mut(&mut self) -> &mut [AsciiChar] {
		self.as_ascii_mut().as_mut_slice()
	}

	/// Converts this string to a `&[u8]`
	#[must_use]
	pub fn as_bytes(&self) -> &[u8] {
		self.as_ascii().as_bytes()
	}

	/// Converts this string to a `&str`
	#[must_use]
	pub fn as_str(&self) -> &str {
		self.as_ascii().as_str()
	}
}

/// Conversions from other strings
impl<const N: usize> AsciiStrArr<N> {
	/// Creates a string from anything that coerces to `&[AsciiChar]`, including `AsciiStr`
	///
	/// # Errors
	/// Returns an error if `ascii` has more than `N` characters.
	pub fn from_ascii<S: ?Sized + AsRef<[AsciiChar]>>(ascii: &S) -> Result<Self, TooLongError<N>> {
		let ascii = ascii.as_ref();

		// If it has too many elements, return Err
		if ascii.len() > N {
			return Err(TooLongError::<N>);
		}

		// Else create an uninitialized array and copy over the initialized characters
		let mut chars = [AsciiChar::Null; N];
		for (uninit, &ascii) in chars.iter_mut().zip(ascii) {
			*uninit = ascii;
		}

		Ok(Self {
			chars,
			len: ascii.len(),
		})
	}

	/// Creates a string from bytes
	///
	/// # Errors
	/// Returns an error if `bytes` contains non-ascii characters, or
	/// has more than `N` of them.
	pub fn from_bytes<B: ?Sized + AsRef<[u8]>>(bytes: &B) -> Result<Self, FromBytesError<N>> {
		// Get the bytes as ascii first
		let ascii = AsciiStr::from_ascii(bytes)
			.map_err(ascii::AsAsciiStrError::valid_up_to)
			.map_err(|pos| NotAsciiError { pos })
			.map_err(FromBytesError::NotAscii)?;

		// Then try to convert them
		Self::from_ascii(ascii).map_err(FromBytesError::TooLong)
	}

	// Note: No `from_str`, implemented using `FromStr`
}
/// Slicing
impl<const N: usize> AsciiStrArr<N> {
	/// Slices this string, if in bounds
	///
	/// Bounds are checked against the string's *length*, not its
	/// capacity `N`, so indices in `len..N` return `None`.
	#[must_use]
	pub fn get<I: SliceIndex<[AsciiChar]>>(&self, idx: I) -> Option<&I::Output> {
		// Note: Slice only the initialized prefix. Indexing `self.chars`
		// directly would expose the unused `Null` tail past `self.len`,
		// inconsistent with `as_ascii`.
		idx.get(self.as_ascii_slice())
	}

	/// Slices this string mutably, if in bounds
	///
	/// See [`Self::get`] for the bounds semantics.
	#[must_use]
	pub fn get_mut<I: SliceIndex<[AsciiChar]>>(&mut self, idx: I) -> Option<&mut I::Output> {
		idx.get_mut(self.as_ascii_slice_mut())
	}
}
/// Push/Pop
impl<const N: usize> AsciiStrArr<N> {
	/// Pushes a character onto this string, if there is enough space
	///
	/// # Errors
	/// Returns `Err(())` if the string is already at capacity.
	#[allow(clippy::result_unit_err)] // TODO: An error type for this?
	pub fn push(&mut self, ch: AsciiChar) -> Result<(), ()> {
		match self.len == N {
			true => Err(()),
			false => {
				self.chars[self.len] = ch;
				self.len += 1;
				Ok(())
			},
		}
	}

	/// Pushes a string onto this string, if there is enough space
	///
	/// # Errors
	/// Returns `Err(())` if `s` doesn't fit in the remaining capacity;
	/// nothing is copied in that case.
	#[allow(clippy::result_unit_err)] // TODO: An error type for this?
	pub fn push_str(&mut self, s: &AsciiStr) -> Result<(), ()> {
		match self.len + s.len() > N {
			true => Err(()),
			false => {
				self.chars[self.len..(self.len + s.len())].copy_from_slice(s.as_slice());
				self.len += s.len();
				Ok(())
			},
		}
	}

	/// Inserts a character onto the string, if there is enough space
	///
	/// # Errors
	/// Returns `Err(())` if the string is already at capacity.
	///
	/// # Panics
	/// Panics if `idx` is out of bounds.
	#[allow(clippy::result_unit_err)] // TODO: An error type for this?
	pub fn insert(&mut self, idx: usize, ch: AsciiChar) -> Result<(), ()> {
		match self.len == N {
			true => Err(()),
			false => {
				// Shift the tail right by one, then write into the gap
				self.chars.copy_within(idx..self.len, idx + 1);
				self.chars[idx] = ch;
				self.len += 1;
				Ok(())
			},
		}
	}

	/// Removes a range of characters
	///
	/// # Panics
	/// Panics if `range` is out of bounds.
	pub fn drain_range(&mut self, range: Range<usize>) {
		assert!(range.end <= self.len);

		// Shift the tail left over the removed range
		self.chars.copy_within(range.end..self.len, range.start);
		self.len -= range.end - range.start;
	}

	/// Replaces all instances of a character with another
	pub fn replace_inplace(&mut self, from: AsciiChar, to: AsciiChar) {
		for ch in &mut self.chars[..self.len] {
			if *ch == from {
				*ch = to;
			}
		}
	}
}
impl<const N: usize> AsRef<AsciiStr> for AsciiStrArr<N> {
	fn as_ref(&self) -> &AsciiStr {
		self.as_ascii()
	}
}

impl<const N: usize> AsMut<AsciiStr> for AsciiStrArr<N> {
	fn as_mut(&mut self) -> &mut AsciiStr {
		self.as_ascii_mut()
	}
}

impl<const N: usize> AsRef<[AsciiChar]> for AsciiStrArr<N> {
	fn as_ref(&self) -> &[AsciiChar] {
		self.as_ascii_slice()
	}
}

impl<const N: usize> AsMut<[AsciiChar]> for AsciiStrArr<N> {
	fn as_mut(&mut self) -> &mut [AsciiChar] {
		self.as_ascii_slice_mut()
	}
}

impl<const N: usize> AsRef<[u8]> for AsciiStrArr<N> {
	fn as_ref(&self) -> &[u8] {
		self.as_bytes()
	}
}

impl<const N: usize> AsRef<str> for AsciiStrArr<N> {
	fn as_ref(&self) -> &str {
		self.as_str()
	}
}

// Note: No `AsMut<[u8]>` nor `AsMut<str>`, as that'd allow for modification
// outside of ascii.

// Note: All comparisons and hashes below go through `as_ascii`, so only
// `chars[..len]` is considered — the unused tail never affects them.
impl<const N: usize> PartialEq for AsciiStrArr<N> {
	fn eq(&self, other: &Self) -> bool {
		AsciiStr::eq(self.as_ascii(), other.as_ascii())
	}
}

impl<const N: usize> Eq for AsciiStrArr<N> {}

impl<const N: usize> PartialEq<str> for AsciiStrArr<N> {
	fn eq(&self, other: &str) -> bool {
		self.as_str() == other
	}
}

impl<const N: usize> PartialOrd for AsciiStrArr<N> {
	fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
		Some(self.cmp(other))
	}
}

impl<const N: usize> Ord for AsciiStrArr<N> {
	fn cmp(&self, other: &Self) -> Ordering {
		AsciiStr::cmp(self.as_ascii(), other.as_ascii())
	}
}

impl<const N: usize> Hash for AsciiStrArr<N> {
	fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
		AsciiStr::hash(self.as_ascii(), state);
	}
}

impl<const N: usize> Default for AsciiStrArr<N> {
	fn default() -> Self {
		Self::new()
	}
}
impl<I, const N: usize> ops::Index<I> for AsciiStrArr<N>
where
	I: SliceIndex<[AsciiChar]>,
{
	type Output = <I as SliceIndex<[AsciiChar]>>::Output;

	// Note: Panics on out-of-bounds access, via `get`'s `None`.
	fn index(&self, idx: I) -> &Self::Output {
		self.get(idx).expect("Invalid index access")
	}
}

impl<I, const N: usize> ops::IndexMut<I> for AsciiStrArr<N>
where
	I: SliceIndex<[AsciiChar]>,
{
	fn index_mut(&mut self, idx: I) -> &mut Self::Output {
		self.get_mut(idx).expect("Invalid index access")
	}
}

impl<const N: usize> fmt::Debug for AsciiStrArr<N> {
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		// Defer to `AsciiStr`
		AsciiStr::fmt(self.as_ascii(), f)
	}
}

impl<const N: usize> fmt::Display for AsciiStrArr<N> {
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		// Defer to `AsciiStr`
		AsciiStr::fmt(self.as_ascii(), f)
	}
}

#[cfg(feature = "use_serde")]
impl<'de, const N: usize> serde::Deserialize<'de> for AsciiStrArr<N> {
	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
	where
		D: serde::Deserializer<'de>,
	{
		// Deserialize from a string, validating length and ascii-ness
		deserializer.deserialize_str(DeserializerVisitor)
	}
}

#[cfg(feature = "use_serde")]
impl<const N: usize> serde::Serialize for AsciiStrArr<N> {
	fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
	where
		S: serde::Serializer,
	{
		// Serialize as an ascii string
		serializer.serialize_str(self.as_str())
	}
}
// TODO: Generalize this to `impl<const N: usize, const M: usize> From<&[AsciiChar; M]> for AsciiStrArr<N> where M <= N`
impl<const N: usize> From<&[AsciiChar; N]> for AsciiStrArr<N> {
	fn from(src: &[AsciiChar; N]) -> Self {
		<Self as From<[AsciiChar; N]>>::from(*src)
	}
}

// TODO: Generalize this to `impl<const N: usize, const M: usize> From<[AsciiChar; M]> for AsciiStrArr<N> where M <= N`
impl<const N: usize> From<[AsciiChar; N]> for AsciiStrArr<N> {
	fn from(chars: [AsciiChar; N]) -> Self {
		// A full array is always a valid string of length `N`
		Self { chars, len: N }
	}
}

// TODO: Generalize this to `impl<const N: usize, const M: usize> TryFrom<&[u8; M]> for AsciiStrArr<N> where M <= N`
impl<const N: usize> TryFrom<&[u8; N]> for AsciiStrArr<N> {
	type Error = NotAsciiError;

	fn try_from(byte_str: &[u8; N]) -> Result<Self, Self::Error> {
		// Convert each byte, bailing on the first non-ascii one
		let mut chars = [AsciiChar::Null; N];
		for (pos, (&byte, ascii)) in byte_str.iter().zip(&mut chars).enumerate() {
			*ascii = AsciiChar::from_ascii(byte).map_err(|_err| NotAsciiError { pos })?;
		}

		Ok(Self {
			chars,
			len: byte_str.len(),
		})
	}
}

impl<const N: usize> TryFrom<&AsciiStr> for AsciiStrArr<N> {
	type Error = TooLongError<N>;

	fn try_from(ascii: &AsciiStr) -> Result<Self, Self::Error> {
		Self::from_ascii(ascii)
	}
}

impl<const N: usize> TryFrom<&[u8]> for AsciiStrArr<N> {
	type Error = FromBytesError<N>;

	fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
		Self::from_bytes(bytes)
	}
}

impl<const N: usize> TryFrom<&str> for AsciiStrArr<N> {
	type Error = FromUtf8Error<N>;

	fn try_from(s: &str) -> Result<Self, Self::Error> {
		Self::from_bytes(s.as_bytes())
	}
}

impl<const N: usize> TryFrom<&std::ffi::OsStr> for AsciiStrArr<N> {
	type Error = FromBytesError<N>;

	fn try_from(s: &std::ffi::OsStr) -> Result<Self, Self::Error> {
		// TODO: Not allocate here, although `OsStr` doesn't provide a `as_bytes` impl, so we can't do much
		Self::from_bytes(s.to_string_lossy().as_bytes())
	}
}

impl<const N: usize> std::str::FromStr for AsciiStrArr<N> {
	type Err = FromUtf8Error<N>;

	fn from_str(s: &str) -> Result<Self, Self::Err> {
		Self::from_bytes(s.as_bytes())
	}
}

View File

@ -1,32 +0,0 @@
//! Errors
/// The given string was too long to be converted.
#[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)]
#[derive(thiserror::Error)]
#[error("String must be at most {} characters", LEN)]
pub struct TooLongError<const LEN: usize>;

/// The given string has non-ascii characters.
#[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)]
#[derive(thiserror::Error)]
#[error("Character at pos {pos} was not ascii")]
pub struct NotAsciiError {
	/// Index of the first non-ascii character
	pub pos: usize,
}

/// Error returned when converting a byte string to an [`AsciiStrArr`](super::AsciiStrArr).
#[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)]
#[derive(thiserror::Error)]
pub enum FromBytesError<const LEN: usize> {
	/// Too long
	#[error("String was too long")]
	TooLong(TooLongError<LEN>),

	/// Not ascii
	#[error("String contained non-ascii characters")]
	NotAscii(NotAsciiError),
}

/// Error returned when converting a utf-8 [`String`] to an [`AsciiStrArr`](super::AsciiStrArr).
// Note: Currently an alias, since the failure modes are the same.
pub type FromUtf8Error<const LEN: usize> = FromBytesError<LEN>;

View File

@ -1,28 +0,0 @@
//! Visitor for [`AsciiStrArr`]
// Imports
use {
super::AsciiStrArr,
ascii::AsciiStr,
std::{convert::TryFrom, fmt},
};
/// Visitor implementation
///
/// Deserializes an [`AsciiStrArr<N>`] from a string, rejecting
/// non-ascii input and strings longer than `N`.
pub(super) struct DeserializerVisitor<const N: usize>;

impl<'de, const N: usize> serde::de::Visitor<'de> for DeserializerVisitor<N> {
	type Value = AsciiStrArr<N>;

	fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
		// TODO: Maybe get the full string at compile time and use `write_str`
		f.write_fmt(format_args!("An ascii string of length {} or less", N))
	}

	fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
		// Convert it to ascii
		let ascii_str = AsciiStr::from_ascii(value).map_err(E::custom)?;

		// Try to convert it
		AsciiStrArr::try_from(ascii_str).map_err(E::custom)
	}
}

View File

@ -1,56 +0,0 @@
//! Ascii text buffer
// Imports
use crate::{ascii_str_arr::AsciiChar, AsciiStrArr};
/// An ascii text buffer
#[derive(PartialEq, Default, Clone, Debug, derive_more::Display)]
#[derive(ref_cast::RefCast)]
#[repr(transparent)]
pub struct AsciiTextBuffer<const N: usize>(pub AsciiStrArr<N>);
// Truncates any extra characters and ignores non-ascii
impl<const N: usize> From<String> for AsciiTextBuffer<N> {
    fn from(s: String) -> Self {
        let mut this = Self::default();

        // Keep only the ascii characters, stopping as soon as the
        // fixed-capacity buffer refuses another push (i.e. it's full).
        for ch in s.chars().filter_map(|ch| AsciiChar::from_ascii(ch).ok()) {
            if this.0.push(ch).is_err() {
                break;
            }
        }

        this
    }
}
impl<const N: usize> From<AsciiTextBuffer<N>> for String {
fn from(buffer: AsciiTextBuffer<N>) -> Self {
// Borrow as `&str` via the `AsRef` impl below, then copy into an owned `String`
buffer.as_ref().to_owned()
}
}
impl<const N: usize> AsRef<str> for AsciiTextBuffer<N> {
fn as_ref(&self) -> &str {
// Ascii is always valid utf-8, so the inner buffer can be viewed as `&str`
self.0.as_str()
}
}
// Note: In ascii, the character index is the same
// as the byte index.
impl<const N: usize> eframe::egui::widgets::TextBuffer for AsciiTextBuffer<N> {
/// Inserts `text` at `ch_idx`, returning how many characters were inserted.
///
/// Non-ascii characters are silently dropped; insertion stops as soon as
/// the fixed-capacity buffer rejects an insert (i.e. when it's full).
fn insert_text(&mut self, text: &str, ch_idx: usize) -> usize {
text.chars()
.filter_map(|ch| AsciiChar::from_ascii(ch).ok())
.enumerate()
// Note: `take_while` both performs the insert (side effect) and stops
// on the first failure; `count` then tallies the successful inserts.
.take_while(|&(idx, ch)| self.0.insert(ch_idx + idx, ch).is_ok())
.count()
}
fn delete_char_range(&mut self, ch_range: std::ops::Range<usize>) {
self.0.drain_range(ch_range);
}
}

View File

@ -1,31 +0,0 @@
//! Binary coded decimal
/// A `BCD` u8 type
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug)]
pub struct BcdU8(pub u8);

impl BcdU8 {
    /// Decodes this packed-BCD byte into its numeric value (`0..=99`).
    ///
    /// Returns `None` if either nibble is outside the decimal range `0..=9`.
    #[must_use]
    pub const fn to_u8(self) -> Option<u8> {
        let ones = self.0 & 0x0f;
        let tens = self.0 >> 4u8;
        if ones <= 9 && tens <= 9 {
            Some(tens * 10 + ones)
        } else {
            None
        }
    }

    /// Encodes a numeric value (`0..=99`) as a packed-BCD byte.
    ///
    /// Returns `None` for `100` and above, which don't fit in two nibbles.
    #[must_use]
    pub const fn from_u8(value: u8) -> Option<Self> {
        match value {
            0..=99 => Some(Self(((value / 10) << 4) | (value % 10))),
            _ => None,
        }
    }
}

View File

@ -1,65 +0,0 @@
//! `BTreeMap<K, Vec<V>>`.
// Imports
use std::{borrow::Borrow, collections::BTreeMap, iter::FromIterator, ops::RangeBounds};
/// A b-tree map with `Vec<V>` values, sorted by
/// insertion order.
#[derive(PartialEq, Clone, Debug)]
pub struct BTreeMapVector<K, V> {
    /// The underlying map
    map: BTreeMap<K, Vec<V>>,
}

impl<K, V> BTreeMapVector<K, V> {
    /// Creates a new, empty map.
    #[must_use]
    pub fn new() -> Self
    where
        K: Ord,
    {
        Self { map: BTreeMap::new() }
    }

    /// Returns a range of this map
    ///
    /// Yields each key once per value stored under it, values in sorted order.
    pub fn range<T, R>(&self, range: R) -> impl DoubleEndedIterator<Item = (&K, &V)>
    where
        T: Ord + ?Sized,
        R: RangeBounds<T>,
        K: Borrow<T> + Ord,
    {
        self.map
            .range(range)
            .flat_map(|(key, entries)| entries.iter().map(move |entry| (key, entry)))
    }

    /// Inserts a key-value pair into the map
    pub fn insert(&mut self, key: K, value: V)
    where
        K: Ord,
        V: Ord,
    {
        let entries = self.map.entry(key).or_default();
        entries.push(value);
        // TODO: Not sort on every insert
        entries.sort_unstable();
    }
}

impl<K: Ord, V> Default for BTreeMapVector<K, V> {
    fn default() -> Self {
        Self::new()
    }
}

impl<K: Ord, V: Ord> FromIterator<(K, V)> for BTreeMapVector<K, V> {
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        iter.into_iter().fold(Self::new(), |mut map, (key, value)| {
            map.insert(key, value);
            map
        })
    }
}

View File

@ -1,130 +0,0 @@
//! Cached value
// Imports
use std::{
hash::Hash,
marker::{PhantomData, Tuple},
ops,
};
/// A cached, update-able value
///
/// Stores the result of calling `F` together with a hash of the arguments
/// that produced it, so recomputation can be skipped when the input hash
/// is unchanged (see `Self::try_update`).
// TODO: Switch to only requiring `<T, Args>` but allow for `for<'a> (&'a u32)` and stuff.
pub struct CachedValue<T, F> {
/// Value
value: T,
/// Hash of inputs
///
/// Computed via `crate::hash_of`; a hash collision would wrongly skip an update.
input_hash: u64,
/// Phantom data
///
/// Ties the cache to the function type without storing the function itself.
phantom: PhantomData<F>,
}
// Note: Relies on the nightly `unboxed_closures`/`fn_traits` (`Fn<Args>`,
// `call_once`), `tuple_trait` (`Tuple`) and `unwrap_infallible` (`into_ok`)
// features. Infallible closures are routed through `FnResultWrapper`
// (below) so only the `try_*` methods carry the real logic.
impl<T, F> CachedValue<T, F> {
/// Creates a new cached value from arguments
pub fn new<Args: Tuple + Hash, F2>(args: Args, f: F2) -> Self
where
F: Fn<Args>,
F2: FnOnce<Args, Output = T>,
{
// `f` can't fail, so the `Result<_, !>` from `try_new` is safe to unwrap
Self::try_new(args, FnResultWrapper(f)).into_ok()
}
/// Tries to create a new cached value from arguments
pub fn try_new<Args: Tuple + Hash, E, F2>(args: Args, f: F2) -> Result<Self, E>
where
F: Fn<Args>,
F2: FnOnce<Args, Output = Result<T, E>>,
{
// Get the hash of the input
let input_hash = crate::hash_of(&args);
// Then try to get the value
let value = f.call_once(args)?;
Ok(Self {
value,
input_hash,
phantom: PhantomData,
})
}
/// Updates a cached value given its arguments and function
pub fn update<Args: Tuple + Hash, F2>(this: &mut Self, args: Args, f: F2)
where
F: Fn<Args>,
F2: FnOnce<Args, Output = T>,
{
Self::try_update(this, args, FnResultWrapper(f)).into_ok();
}
/// Tries to update a cached value given its arguments and function
///
/// `f` is only called (and the cached value only replaced) when the
/// argument hash differs from the cached one.
pub fn try_update<Args: Tuple + Hash, E, F2>(this: &mut Self, args: Args, f: F2) -> Result<(), E>
where
F: Fn<Args>,
F2: FnOnce<Args, Output = Result<T, E>>,
{
// If the hash of the inputs is the same, return
let input_hash = crate::hash_of(&args);
if input_hash == this.input_hash {
return Ok(());
}
// Else update our value and our hash
// Note: Only update the hash if we successfully got the value
this.value = f.call_once(args)?;
this.input_hash = input_hash;
Ok(())
}
/// Creates or updates a cached value
pub fn new_or_update<Args: Tuple + Hash, F2>(this: &mut Option<Self>, args: Args, f: F2) -> &mut Self
where
F: Fn<Args>,
F2: FnOnce<Args, Output = T>,
{
Self::try_new_or_update(this, args, FnResultWrapper(f)).into_ok()
}
/// Tries to create or update a cached value
///
/// On success returns a reference to the (possibly freshly inserted) cache.
pub fn try_new_or_update<Args: Tuple + Hash, E, F2>(
this: &mut Option<Self>,
args: Args,
f: F2,
) -> Result<&mut Self, E>
where
F: Fn<Args>,
F2: FnOnce<Args, Output = Result<T, E>>,
{
// Note: Checking first saves a hash check on `Self::update`
match this {
Some(this) => {
Self::try_update(this, args, f)?;
Ok(this)
},
None => Self::try_new(args, f).map(move |value| this.insert(value)),
}
}
}
impl<T, F> ops::Deref for CachedValue<T, F> {
type Target = T;
/// Gives read-only access to the cached value
fn deref(&self) -> &Self::Target {
&self.value
}
}
/// Wraps a function that returns `T` to make it return `Result<T, !>`
///
/// Lets the infallible `CachedValue` methods reuse the `try_*` variants,
/// unwrapping with `into_ok` (the `!` error makes that safe).
struct FnResultWrapper<F>(F);
// Note: Requires the nightly `unboxed_closures`/`fn_traits`/`never_type` features.
impl<F: FnOnce<Args>, Args: Tuple> FnOnce<Args> for FnResultWrapper<F> {
type Output = Result<F::Output, !>;
extern "rust-call" fn call_once(self, args: Args) -> Self::Output {
Ok(self.0.call_once(args))
}
}

View File

@ -1,66 +0,0 @@
//! Merging iterator
// Imports
use {either::Either, std::cmp::Ordering};
/// Merging sorted iterator
///
/// Will discard duplicate items.
pub struct DiscardingSortedMergeIter<T: Ord, Li: Iterator<Item = T>, Ri: Iterator<Item = T>> {
    /// Left iterator
    lhs: Li,
    /// Right iterator
    rhs: Ri,
    /// Element held back after it lost the last comparison
    last: Option<Held<T>>,
}

/// Which iterator a held-back element came from
enum Held<T> {
    /// Came from the left iterator
    Lhs(T),
    /// Came from the right iterator
    Rhs(T),
}

impl<T: Ord, Li: Iterator<Item = T>, Ri: Iterator<Item = T>> DiscardingSortedMergeIter<T, Li, Ri> {
    /// Creates a new merging iterator
    pub fn new(lhs: Li, rhs: Ri) -> Self {
        Self { lhs, rhs, last: None }
    }

    /// Chooses between two values, storing the larger one and
    /// discarding the `rhs` value if equal.
    ///
    /// `self.last` must not be populated.
    fn cmp_next(&mut self, lhs: T, rhs: T) -> T {
        match lhs.cmp(&rhs) {
            // Note: Discard rhs
            Ordering::Equal => lhs,
            Ordering::Less => {
                self.last = Some(Held::Rhs(rhs));
                lhs
            },
            Ordering::Greater => {
                self.last = Some(Held::Lhs(lhs));
                rhs
            },
        }
    }
}

impl<T: Ord, Li: Iterator<Item = T>, Ri: Iterator<Item = T>> Iterator for DiscardingSortedMergeIter<T, Li, Ri> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        // Pair the held-back element (if any) with one from the other side
        // and compare; otherwise pull a fresh element from both iterators.
        match self.last.take() {
            Some(Held::Lhs(lhs)) => match self.rhs.next() {
                Some(rhs) => Some(self.cmp_next(lhs, rhs)),
                None => Some(lhs),
            },
            Some(Held::Rhs(rhs)) => match self.lhs.next() {
                Some(lhs) => Some(self.cmp_next(lhs, rhs)),
                None => Some(rhs),
            },
            None => match (self.lhs.next(), self.rhs.next()) {
                (Some(lhs), Some(rhs)) => Some(self.cmp_next(lhs, rhs)),
                (Some(value), None) | (None, Some(value)) => Some(value),
                (None, None) => None,
            },
        }
    }
}

View File

@ -1,23 +0,0 @@
//! Display wrapper.
// Imports
use std::{cell::RefCell, fmt};
/// A display wrapper using `F`
pub struct DisplayWrapper<F: FnMut(&mut fmt::Formatter) -> fmt::Result>(RefCell<F>);

impl<F: FnMut(&mut fmt::Formatter) -> fmt::Result> DisplayWrapper<F> {
    /// Creates a new display wrapper
    #[must_use]
    pub fn new(func: F) -> Self {
        Self(RefCell::new(func))
    }
}

impl<F: FnMut(&mut fmt::Formatter) -> fmt::Result> fmt::Display for DisplayWrapper<F> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Note: `f` cannot be re-entrant, so this cannot fail
        let mut func = self.0.borrow_mut();
        func(f)
    }
}

View File

@ -1,48 +0,0 @@
//! Type families
/// Result family
///
/// Abstracts over types convertible to and from [`Result`], exposing the
/// ok/error type parameters as associated types.
#[sealed::sealed(erase)]
pub trait ResultFamily: Into<Result<Self::Ok, Self::Err>> + From<Result<Self::Ok, Self::Err>> {
/// Ok type
type Ok;
/// Error type
type Err;
}
#[sealed::sealed]
impl<T, E> ResultFamily for Result<T, E> {
type Err = E;
type Ok = T;
}
/// Tuple 2 family
///
/// Same idea as [`ResultFamily`], for 2-tuples.
#[sealed::sealed(erase)]
pub trait Tuple2Family: Into<(Self::A, Self::B)> + From<(Self::A, Self::B)> {
/// First type
type A;
/// Second type
type B;
}
#[sealed::sealed]
impl<A, B> Tuple2Family for (A, B) {
type A = A;
type B = B;
}
/// Slice family
///
/// Family for slice-like types, exposing the element type.
#[sealed::sealed(erase)]
pub trait SliceFamily
where
for<'a> &'a Self: From<&'a [Self::Value]> + Into<&'a [Self::Value]>,
{
/// Value of each element
type Value;
}
#[sealed::sealed]
impl<T> SliceFamily for [T] {
type Value = T;
}

View File

@ -1,59 +0,0 @@
//! File lock
// Imports
use std::{
fs,
io,
path::{Path, PathBuf},
};
/// A file lock
#[derive(Debug)]
pub struct FileLock {
    /// Lock path
    path: PathBuf,
}

impl FileLock {
    /// Creates a new file lock
    ///
    /// Returns `None` if the lock file already exists (i.e. the lock is held).
    pub fn new(path: impl Into<PathBuf> + AsRef<Path>) -> Option<Self> {
        // Atomically create the lock file; `create_new` fails if it exists.
        let created = fs::OpenOptions::new()
            .write(true)
            .create_new(true)
            .open(&path);

        match created {
            Ok(_) => Some(Self { path: path.into() }),
            Err(_) => None,
        }
    }

    /// Unlocks this lock.
    ///
    /// Note: This should only be called right before destroying the file lock
    fn unlock_ref_mut(&mut self) -> Result<(), io::Error> {
        // Try to delete the file
        fs::remove_file(&self.path)
    }

    /// Unlocks this lock
    pub fn unlock(mut self) -> Result<(), io::Error> {
        // Unlock ourselves
        // Note: We can't use `?`, as then we'd also run the destructor if it failed.
        let res = self.unlock_ref_mut();

        // And forget ourselves
        #[allow(clippy::mem_forget)] // We explicitly do not want to run the destructor
        std::mem::forget(self);

        res
    }
}

impl Drop for FileLock {
    fn drop(&mut self) {
        if let Err(err) = self.unlock_ref_mut() {
            log::warn!("Unable to unlock {:?}: {err}", self.path);
        }
    }
}

View File

@ -1,276 +0,0 @@
//! Io slice
// Imports
use {
crate::write_take::WriteTakeExt,
std::{
convert::TryFrom,
io::{self, Read, Seek, SeekFrom, Write},
ops::{Bound, RangeBounds},
},
};
/// Io slice.
///
/// Slices an inner value to only allow access to a range.
#[derive(Clone, Debug)]
pub struct IoSlice<T> {
    /// Inner value
    inner: T,
    /// Start position
    start_pos: u64,
    /// End position (one past the last accessible byte)
    end_pos: u64,
}

impl<T: Seek> IoSlice<T> {
    /// Creates a new slice given a u64 range
    ///
    /// Seeks `inner` to the start of the slice.
    ///
    /// # Errors
    /// Returns an error if the range's end is before its start, or if
    /// seeking the inner value fails.
    pub fn new<R: RangeBounds<u64>>(mut inner: T, range: R) -> Result<Self, io::Error> {
        // Get the start position and simplify the end bound
        // TODO: Check if saturating on overflow is fine here, should be.
        let start_pos = match range.start_bound().cloned() {
            Bound::Included(start) => start,
            Bound::Excluded(start) => start.saturating_add(1),
            Bound::Unbounded => 0,
        };
        let end_bound = match range.end_bound().cloned() {
            Bound::Included(end) => Some(end.saturating_add(1)),
            Bound::Excluded(end) => Some(end),
            Bound::Unbounded => None,
        };

        // Then seek to the start and get the end position
        // Note: An unbounded end means the slice extends to the stream's end.
        let end_pos = match end_bound {
            Some(end_pos) => {
                inner.seek(SeekFrom::Start(start_pos))?;
                end_pos
            },
            None => {
                let end_pos = inner.seek(SeekFrom::End(0))?;
                inner.seek(SeekFrom::Start(start_pos))?;
                end_pos
            },
        };

        // Reject inverted ranges.
        // Fix: the old message had this backwards — this arm triggers when the
        // end is *before* the start.
        if end_pos < start_pos {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "End position can't be before start position",
            ));
        }

        Ok(Self {
            inner,
            start_pos,
            end_pos,
        })
    }

    /// Creates a new slice from an offset (from the start of the stream) and a length
    pub fn new_with_offset_len(mut inner: T, start_pos: u64, len: u64) -> Result<Self, io::Error> {
        // Get the end position
        // TODO: Check if saturating add is good enough here? Use case is `size == usize::MAX`
        let end_pos = start_pos.saturating_add(len);

        // Seek to the start
        inner.seek(SeekFrom::Start(start_pos))?;

        Ok(Self {
            inner,
            start_pos,
            end_pos,
        })
    }

    /// Creates a slice from the current position with at most `size` bytes.
    pub fn new_take(mut inner: T, size: u64) -> Result<Self, io::Error> {
        let start_pos = inner.stream_position()?;
        Self::new_with_offset_len(inner, start_pos, size)
    }

    /// Consumes this slice and returns the inner value
    pub fn into_inner(self) -> T {
        self.inner
    }

    /// Returns the len of this slice
    pub fn len(&self) -> u64 {
        self.end_pos.saturating_sub(self.start_pos)
    }

    /// Returns if this slice is empty
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the current position of the slice, relative to its start
    pub fn cur_pos(&mut self) -> Result<u64, io::Error> {
        let inner_pos = self.inner.stream_position()?;
        Ok(inner_pos - self.start_pos)
    }

    /// Returns the remaining length of the slice
    pub fn remaining_len(&mut self) -> Result<u64, io::Error> {
        Ok(self.end_pos - self.inner.stream_position()?)
    }
}
impl<T: Read + Seek> Read for IoSlice<T> {
    // Every method caps the underlying reader at the slice's remaining
    // length via `Read::take`, so reads can never escape the slice.
    fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> {
        let remaining = self.remaining_len()?;
        let mut limited = self.inner.by_ref().take(remaining);
        limited.read(buf)
    }

    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        let remaining = self.remaining_len()?;
        let mut limited = self.inner.by_ref().take(remaining);
        limited.read_to_end(buf)
    }

    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        let remaining = self.remaining_len()?;
        let mut limited = self.inner.by_ref().take(remaining);
        limited.read_to_string(buf)
    }

    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        let remaining = self.remaining_len()?;
        let mut limited = self.inner.by_ref().take(remaining);
        limited.read_exact(buf)
    }
}
impl<T: Write + Seek> Write for IoSlice<T> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
// NOTE(review): `T` is only bound by `Write + Seek` here, so `.take`
// should resolve to the writer-limiting `WriteTakeExt::take` (imported
// from `crate::write_take` above), not `Read::take` — confirm. It caps
// writes at the slice's remaining length.
let limit = self.remaining_len()?;
self.inner.by_ref().take(limit).write(buf)
}
fn flush(&mut self) -> io::Result<()> {
// Flushing isn't bounded by the slice, so forward it directly
self.inner.flush()
}
fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
let limit = self.remaining_len()?;
self.inner.by_ref().take(limit).write_all(buf)
}
fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> io::Result<()> {
let limit = self.remaining_len()?;
self.inner.by_ref().take(limit).write_fmt(fmt)
}
}
impl<T: Seek> Seek for IoSlice<T> {
    /// Seeks within the slice, clamping to its bounds.
    ///
    /// All reported positions are relative to the slice's start.
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        match pos {
            // When seeking from start, stop at the end without overflowing.
            SeekFrom::Start(pos) => {
                let pos = u64::min(pos, self.len());
                let inner_pos = self.inner.seek(SeekFrom::Start(self.start_pos + pos))?;
                Ok(inner_pos - self.start_pos)
            },

            // Special case `End(0)` for `stream_len`.
            SeekFrom::End(0) => {
                let inner_pos = self.inner.seek(SeekFrom::Start(self.end_pos))?;
                Ok(inner_pos - self.start_pos)
            },

            // If trying to seek past the end, return error
            SeekFrom::End(1..=i64::MAX) => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Cannot seek past the slice",
            )),

            // When seeking from end, stop at the start without underflowing.
            // Fix: this arm used to be `todo!()`. `offset` is negative here,
            // so the target is `len - |offset|`, clamped at the slice start.
            SeekFrom::End(offset) => {
                let pos = self.len().saturating_sub(offset.unsigned_abs());
                let inner_pos = self.inner.seek(SeekFrom::Start(self.start_pos + pos))?;
                Ok(inner_pos - self.start_pos)
            },

            // Special case `Current(0)` for `stream_position`
            SeekFrom::Current(0) => self.cur_pos(),

            // When seeking from an offset, stop at either start or end without over/underflowing
            SeekFrom::Current(offset) => {
                // Calculate the actual offset to use, by clamping
                let offset = match offset.is_negative() {
                    // If zero / positive, check overflow
                    false => {
                        let until_end =
                            i64::try_from(self.remaining_len()?).expect("Remaining size didn't fit into an `i64`");
                        i64::min(until_end, offset)
                    },
                    // If negative, check underflow
                    true => {
                        let until_start =
                            -i64::try_from(self.cur_pos()?).expect("Remaining size didn't fit into an `i64`");
                        i64::max(until_start, offset)
                    },
                };

                // Then seek to it using relative seeks
                let inner_pos = self.inner.seek(SeekFrom::Current(offset))?;
                Ok(inner_pos - self.start_pos)
            },
        }
    }
}
// Impls for `&IoSlice<&T>`, such as `T = std::fs::File`
impl<'a, T> Read for &'a IoSlice<T>
where
for<'b> &'b mut &'a T: Read + Seek,
{
// Each method builds a temporary `IoSlice<&mut &T>` sharing this slice's
// bounds and forwards the call to it (see `exec_as_ref_ref_mut` below).
// NOTE(review): the cursor position must live in the inner value itself
// (as with `File`'s OS-level offset) for this to stay coherent — confirm.
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self::exec_as_ref_ref_mut(self, |mut slice| slice.read(buf))
}
fn read_vectored(&mut self, bufs: &mut [io::IoSliceMut<'_>]) -> io::Result<usize> {
self::exec_as_ref_ref_mut(self, |mut slice| slice.read_vectored(bufs))
}
fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
self::exec_as_ref_ref_mut(self, |mut slice| slice.read_to_end(buf))
}
fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
self::exec_as_ref_ref_mut(self, |mut slice| slice.read_to_string(buf))
}
fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
self::exec_as_ref_ref_mut(self, |mut slice| slice.read_exact(buf))
}
}
impl<'a, T> Seek for &'a IoSlice<T>
where
for<'b> &'b mut &'a T: Seek,
{
// Same strategy as the `Read` impl above: forward every call through a
// temporary `IoSlice<&mut &T>` that shares this slice's bounds.
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
self::exec_as_ref_ref_mut(self, |mut slice| slice.seek(pos))
}
fn rewind(&mut self) -> io::Result<()> {
self::exec_as_ref_ref_mut(self, |mut slice| slice.rewind())
}
fn stream_len(&mut self) -> io::Result<u64> {
// Note: `stream_len` is the unstable `seek_stream_len` feature
self::exec_as_ref_ref_mut(self, |mut slice| slice.stream_len())
}
fn stream_position(&mut self) -> io::Result<u64> {
self::exec_as_ref_ref_mut(self, |mut slice| slice.stream_position())
}
}
/// Creates a `IoSlice<&mut &T>` from `&mut & IoSlice<T>` and runs `f` with it
fn exec_as_ref_ref_mut<'a, 'b, T, O>(slice: &'b mut &'a IoSlice<T>, f: impl FnOnce(IoSlice<&'_ mut &'a T>) -> O) -> O {
    // Build a borrowed view sharing the original slice's bounds
    let mut inner_ref: &'a T = &slice.inner;
    f(IoSlice {
        inner: &mut inner_ref,
        start_pos: slice.start_pos,
        end_pos: slice.end_pos,
    })
}

View File

@ -1,96 +0,0 @@
//! Iterator adaptors
// Imports
use std::iter::FromIterator;
/// Try map ok
pub struct TryMapOk<I, F> {
    /// Iterator
    iter: I,
    /// Function
    f: F,
}

impl<T, E, U, I: Iterator<Item = Result<T, E>>, F: FnMut(T) -> Result<U, E>> Iterator for TryMapOk<I, F> {
    type Item = Result<U, E>;

    fn next(&mut self) -> Option<Self::Item> {
        // Pass `Ok` items through `f` (which may itself fail); forward `Err`s untouched.
        let item = self.iter.next()?;
        Some(item.and_then(&mut self.f))
    }
}

/// Extension trait for [`TryMapOk`]
pub trait TryMapOkIter<T, E>: Iterator<Item = Result<T, E>> + Sized {
    /// Creates a [`TryMapOk`] from this iterator
    fn try_map_ok<F, U>(self, f: F) -> TryMapOk<Self, F>
    where
        F: FnMut(T) -> Result<U, E>,
    {
        TryMapOk { iter: self, f }
    }
}

impl<T, E, I: Iterator<Item = Result<T, E>> + Sized> TryMapOkIter<T, E> for I {}
/// Map error
pub struct MapErr<I, F> {
    /// Iterator
    iter: I,
    /// Function
    f: F,
}

impl<T, E, E2, I: Iterator<Item = Result<T, E>>, F: FnMut(E) -> E2> Iterator for MapErr<I, F> {
    type Item = Result<T, E2>;

    fn next(&mut self) -> Option<Self::Item> {
        // Only `Err` items go through `f`; `Ok` items pass through untouched.
        let item = self.iter.next()?;
        Some(item.map_err(&mut self.f))
    }
}

/// Extension trait for [`MapErr`]
pub trait MapErrIter<T, E>: Iterator<Item = Result<T, E>> + Sized {
    /// Creates a [`MapErr`] from this iterator
    fn map_err<F, E2>(self, f: F) -> MapErr<Self, F>
    where
        F: FnMut(E) -> E2,
    {
        MapErr { iter: self, f }
    }
}

impl<T, E, I: Iterator<Item = Result<T, E>> + Sized> MapErrIter<T, E> for I {}
/// Iterator length that may be collected.
#[derive(Clone, Copy, Debug)]
pub struct IterLen {
    /// Length of iterator
    len: usize,
}

impl IterLen {
    /// Returns the number of items the iterator had
    #[must_use]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns if the iterator was empty
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

impl<A> FromIterator<A> for IterLen {
    fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
        // Only the item count matters; the items themselves are dropped.
        let len = iter.into_iter().count();
        Self { len }
    }
}

View File

@ -1,182 +0,0 @@
//! Key-value parallel iterator
// Imports
use {either::Either, std::cmp::Ordering};
/// Iterator over two keyed sorted iterators, providing both
/// elements when keys are equal.
///
/// Otherwise, keys are returned in order.
pub struct KeyedParIter<L, R>
where
L: Iterator,
R: Iterator,
{
/// Left iterator
left: L,
/// Right iterator
right: R,
/// Currently cached value
///
/// Holds the element that "lost" the last key comparison, so it can be
/// yielded on a later call instead of being dropped.
value: Option<Either<L::Item, R::Item>>,
}
impl<L, R> KeyedParIter<L, R>
where
    L: Iterator,
    R: Iterator,
{
    /// Creates a new iterator
    #[must_use]
    pub fn new(left: impl IntoIterator<IntoIter = L>, right: impl IntoIterator<IntoIter = R>) -> Self {
        Self {
            left: left.into_iter(),
            right: right.into_iter(),
            value: None,
        }
    }

    /// Returns the next left value
    pub fn next_left(&mut self) -> Option<L::Item> {
        // Prefer the cached element if it came from the left side;
        // otherwise restore the cache and pull from the iterator.
        match self.value.take() {
            Some(Either::Left(cached)) => Some(cached),
            other => {
                self.value = other;
                self.left.next()
            },
        }
    }

    /// Returns the next right value
    pub fn next_right(&mut self) -> Option<R::Item> {
        // Same as `next_left`, mirrored for the right side.
        match self.value.take() {
            Some(Either::Right(cached)) => Some(cached),
            other => {
                self.value = other;
                self.right.next()
            },
        }
    }
}
impl<L, R> Iterator for KeyedParIter<L, R>
where
L: Iterator<Item: Keyed>,
R: Iterator<Item: Keyed>,
<L::Item as Keyed>::Key: PartialOrd<<R::Item as Keyed>::Key>,
{
type Item = ParIterValue<L::Item, R::Item>;
/// Yields the element with the smaller key, both elements when the keys
/// are equal, or the leftover side once the other is exhausted.
///
/// # Panics
/// Panics if the two keys are not comparable (`partial_cmp` returns `None`).
fn next(&mut self) -> Option<Self::Item> {
// Note: `next_left`/`next_right` drain the cache first, so at most one
// of the two values below can come from `self.value`.
match (self.next_left(), self.next_right()) {
// If we only got one of each value, just return it
(Some(value), None) => Some(ParIterValue::Left(value)),
(None, Some(value)) => Some(ParIterValue::Right(value)),
// If we got both, compare them
(Some(left), Some(right)) => {
let ord = PartialOrd::partial_cmp(left.key(), right.key()).expect("An ordering is required");
// The smaller element is yielded; the larger one is cached for
// the next call. On equal keys, both are yielded and nothing cached.
let (value, ret) = match ord {
Ordering::Less => (Some(Either::Right(right)), ParIterValue::Left(left)),
Ordering::Greater => (Some(Either::Left(left)), ParIterValue::Right(right)),
Ordering::Equal => (None, ParIterValue::Both(left, right)),
};
assert!(self.value.is_none(), "`self.value` should be empty");
self.value = value;
Some(ret)
},
// Else we got none
(None, None) => None,
}
}
}
/// Iterator value
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum ParIterValue<L, R> {
    /// Only left
    Left(L),
    /// Only right
    Right(R),
    /// Both
    Both(L, R),
}

impl<L, R> ParIterValue<L, R> {
    /// Returns a pair of options describing this value
    #[must_use]
    #[allow(clippy::missing_const_for_fn)] // False positive
    pub fn into_opt_pair(self) -> (Option<L>, Option<R>) {
        match self {
            Self::Both(left, right) => (Some(left), Some(right)),
            Self::Left(left) => (Some(left), None),
            Self::Right(right) => (None, Some(right)),
        }
    }

    /// Returns the key of this value
    ///
    /// Note: When this value is [`Self::Both`], returns the left key
    pub fn key(&self) -> &L::Key
    where
        L: Keyed,
        R: Keyed<Key = L::Key>,
    {
        match self {
            Self::Left(value) | Self::Both(value, _) => value.key(),
            Self::Right(value) => value.key(),
        }
    }

    /// Maps both possible values of this value
    pub fn map<L2, R2>(self, lhs: impl FnOnce(L) -> L2, rhs: impl FnOnce(R) -> R2) -> ParIterValue<L2, R2> {
        match self {
            Self::Both(left, right) => ParIterValue::Both(lhs(left), rhs(right)),
            Self::Left(left) => ParIterValue::Left(lhs(left)),
            Self::Right(right) => ParIterValue::Right(rhs(right)),
        }
    }
}

impl<K, L, R> ParIterValue<(K, L), (K, R)> {
    /// Splits the key and value off of this value
    ///
    ///
    /// Note: When this value is [`Self::Both`], returns the left key
    #[allow(clippy::missing_const_for_fn)] // False positive
    pub fn key_value(self) -> (K, ParIterValue<L, R>) {
        match self {
            Self::Left((key, left)) => (key, ParIterValue::Left(left)),
            Self::Right((key, right)) => (key, ParIterValue::Right(right)),
            Self::Both((key, left), (_, right)) => (key, ParIterValue::Both(left, right)),
        }
    }
}

/// Keyed value
pub trait Keyed {
    /// Key
    type Key;

    /// Returns this value's key
    fn key(&self) -> &Self::Key;
}

// Used for most `*Map` iterators.
impl<K, V> Keyed for (K, V) {
    type Key = K;

    fn key(&self) -> &Self::Key {
        &self.0
    }
}

View File

@ -1,353 +0,0 @@
//! Utilities
//!
//! This crate is composed of random utilities I make whenever I'm generalizing a concept
//! found elsewhere, or just need to share some code between two workspaces.
//!
//! # Documentation
//! Documentation is pretty much nonexistent.
//! If a feature is used enough to require extensive documentation it will likely be
//! moved to another crate. Thus everything in here is unlikely to ever receive documentation,
//! unless moved elsewhere.
//!
//! # Stability
//! The crate is also unlikely to be moved from `0.1.0`, with features added and removed without
//! any version bump.
// Features
#![feature(
slice_index_methods,
seek_stream_len,
unboxed_closures,
fn_traits,
decl_macro,
auto_traits,
negative_impls,
try_trait_v2,
never_type,
unwrap_infallible,
tuple_trait
)]
// Modules
#[cfg(feature = "alert")]
pub mod alert;
pub mod alphabet;
pub mod array_split;
pub mod ascii_str_arr;
#[cfg(feature = "gui")]
pub mod ascii_text_buffer;
pub mod bcd;
pub mod btree_map_vector;
pub mod cached_value;
pub mod discarding_sorted_merge_iter;
pub mod display_wrapper;
pub mod family;
pub mod file_lock;
pub mod io_slice;
pub mod iter;
pub mod keyed_par_iter;
pub mod lock_poison;
pub mod map_box;
pub mod next_from_bytes;
pub mod null_ascii_string;
//pub mod ok_or_return;
pub mod signed_hex;
pub mod string_contains_case_insensitive;
#[cfg(feature = "use_futures")]
pub mod task;
pub mod try_into_as;
pub mod try_or_empty;
pub mod void;
pub mod write_take;
// Exports
#[cfg(feature = "gui")]
pub use ascii_text_buffer::AsciiTextBuffer;
pub use {
alphabet::{Alphabet, StrAlphabet, StrArrAlphabet, StringAlphabet},
ascii_str_arr::AsciiStrArr,
bcd::BcdU8,
btree_map_vector::BTreeMapVector,
cached_value::CachedValue,
discarding_sorted_merge_iter::DiscardingSortedMergeIter,
display_wrapper::DisplayWrapper,
family::{ResultFamily, Tuple2Family},
file_lock::FileLock,
io_slice::IoSlice,
iter::{IterLen, MapErr, TryMapOk},
keyed_par_iter::KeyedParIter,
lock_poison::{MutexPoison, RwLockPoison},
map_box::MapBoxResult,
next_from_bytes::NextFromBytes,
null_ascii_string::NullAsciiString,
signed_hex::SignedHex,
string_contains_case_insensitive::StrContainsCaseInsensitive,
try_into_as::TryIntoAs,
try_or_empty::TryOrEmpty,
void::Void,
write_take::WriteTake,
};
//pub use ok_or_return::{OkOrReturn, OkOrReturnResidual, OkOrReturnResult};
// Imports
use std::{
collections::hash_map::DefaultHasher,
error,
fmt,
hash::{Hash, Hasher},
io,
};
#[cfg(feature = "use_serde")]
use std::{fs, path::Path};
/// Error for [`parse_from_file`]
///
/// `E` is the caller-supplied parser's error type.
#[cfg(feature = "use_serde")]
#[derive(Debug, thiserror::Error)]
pub enum ParseFromFileError<E: fmt::Debug + error::Error + 'static> {
/// Unable to open file
#[error("Unable to open file")]
Open(#[source] io::Error),
/// Unable to parse the file
#[error("Unable to parse file")]
Parse(#[source] E),
}
/// Opens and parses a value from a file
///
/// # Errors
/// Returns [`ParseFromFileError::Open`] if the file can't be opened, and
/// [`ParseFromFileError::Parse`] if `parser` fails.
#[cfg(feature = "use_serde")]
pub fn parse_from_file<
    'de,
    T: serde::Deserialize<'de>,
    E: fmt::Debug + error::Error + 'static,
    P: ?Sized + AsRef<Path>,
>(
    path: &P,
    parser: fn(fs::File) -> Result<T, E>,
) -> Result<T, ParseFromFileError<E>> {
    // Open first (io errors), then hand the file to the caller's parser
    let file = match fs::File::open(path) {
        Ok(file) => file,
        Err(err) => return Err(ParseFromFileError::Open(err)),
    };
    parser(file).map_err(ParseFromFileError::Parse)
}
/// Error for [`write_to_file`]
///
/// `E` is the caller-supplied writer's error type.
#[cfg(feature = "use_serde")]
#[derive(Debug, thiserror::Error)]
pub enum WriteToFileError<E: fmt::Debug + error::Error + 'static> {
/// Unable to create file
#[error("Unable to create file")]
Create(#[source] io::Error),
/// Unable to write the file
#[error("Unable to write file")]
Write(#[source] E),
}
/// Creates and writes a value to a file
///
/// # Errors
/// Returns [`WriteToFileError::Create`] if the file can't be created, and
/// [`WriteToFileError::Write`] if `writer` fails.
#[cfg(feature = "use_serde")]
pub fn write_to_file<T: serde::Serialize, E: fmt::Debug + error::Error + 'static, P: ?Sized + AsRef<Path>>(
    path: &P,
    value: &T,
    writer: fn(fs::File, &T) -> Result<(), E>,
) -> Result<(), WriteToFileError<E>> {
    // Create first (io errors), then hand the file to the caller's writer
    let file = match fs::File::create(path) {
        Ok(file) => file,
        Err(err) => return Err(WriteToFileError::Create(err)),
    };
    writer(file, value).map_err(WriteToFileError::Write)
}
/// Returns the *signed* difference `a - b` between two `u64`s, as an `i64`.
///
/// (The old doc called this an "absolute" difference, but the result carries
/// the sign of `a - b`.)
///
/// # Panics
/// If the magnitude of the result exceeds `i64::MAX`, a panic occurs.
/// Note: this also rejects `a - b == i64::MIN`, whose magnitude is `i64::MAX + 1`.
#[allow(clippy::as_conversions)] // We check every operation
#[allow(clippy::panic)] // Rust panics on failed arithmetic operations by default
#[must_use]
pub fn abs_diff(a: u64, b: u64) -> i64 {
    let diff = a.abs_diff(b);
    if diff > i64::MAX as u64 {
        panic!("Overflow when computing signed distance between `u64`");
    }

    // Apply the sign of `a - b`
    #[allow(clippy::cast_possible_wrap)] // We've verified, `diff` is less than `i64::MAX`
    if a > b {
        diff as i64
    } else {
        -(diff as i64)
    }
}
/// Adds a `i64` to a `u64`, performing `a + b`.
///
/// If smaller than `0`, returns 0, if larger than `u64::MAX`, return `u64::MAX`
#[allow(clippy::as_conversions)] // We check every operation
#[allow(clippy::cast_sign_loss)] // We've verify it's positive / negative
#[must_use]
pub const fn saturating_signed_offset(a: u64, b: i64) -> u64 {
    // A non-negative `b` can only overflow; a negative `b` can only underflow.
    // `unsigned_abs` handles `i64::MIN` (magnitude `2^63`) without a special case.
    if b >= 0 {
        a.saturating_add(b as u64)
    } else {
        a.saturating_sub(b.unsigned_abs())
    }
}
/// Adds a `i64` to a `u64`, performing `a + b`.
///
/// If smaller than `0` or larger than `u64::MAX`, returns `None`
#[allow(clippy::as_conversions)] // We check every operation
#[allow(clippy::cast_sign_loss)] // We've verify it's positive / negative
#[must_use]
pub const fn checked_signed_offset(a: u64, b: i64) -> Option<u64> {
    // A non-negative `b` can only overflow; a negative `b` can only underflow.
    // `unsigned_abs` handles `i64::MIN` (magnitude `2^63`) without a special case.
    if b >= 0 {
        a.checked_add(b as u64)
    } else {
        a.checked_sub(b.unsigned_abs())
    }
}
/// Adds a `i64` to a `u64`, performing `a + b`.
///
/// If smaller than `0` or larger than `u64::MAX`, panics
#[allow(clippy::as_conversions)] // We check every operation
#[allow(clippy::cast_sign_loss)] // We've verify it's positive / negative
#[must_use]
pub const fn signed_offset(a: u64, b: i64) -> u64 {
    // `unsigned_abs` handles `i64::MIN` (magnitude `2^63`) without a special
    // case; out-of-range results panic via the plain `+`/`-` operators.
    if b >= 0 {
        a + b as u64
    } else {
        a - b.unsigned_abs()
    }
}
/// Prints an error
///
/// Writes the error itself, then each `source` in its chain on its own
/// numbered, indented line.
pub fn fmt_err(err: &(dyn error::Error + '_), f: &mut fmt::Formatter) -> fmt::Result {
    write!(f, "{err}")?;

    // Walk the cause chain, numbering each entry starting at 1.
    let mut cur_source = err.source();
    let mut depth = 1usize;
    while let Some(cause) = cur_source {
        write!(f, "\n {depth}: {cause}")?;
        cur_source = cause.source();
        depth += 1;
    }

    Ok(())
}
/// Returns a wrapper that prints an error
///
/// The wrapper borrows `err`; see [`fmt_err`] for the output format.
pub fn fmt_err_wrapper<'a>(err: &'a (dyn error::Error + 'a)) -> impl fmt::Display + 'a {
DisplayWrapper::new(move |f| self::fmt_err(err, f))
}
/// Returns a wrapper that prints an error that owns the error
///
/// Useful when the error would not outlive a borrowing wrapper.
pub fn fmt_err_wrapper_owned<E: error::Error>(err: E) -> impl fmt::Display {
DisplayWrapper::new(move |f| self::fmt_err(&err, f))
}
// TODO: Rename both of these `try_*` to like `*_if_{not}_exists`.
/// Attempts to, recursively, create a directory.
///
/// Returns `Ok` if it already exists
pub fn try_create_dir_all(path: impl AsRef<std::path::Path>) -> Result<(), std::io::Error> {
    // Treat "already exists" as success (e.g. when racing another creator);
    // propagate every other error.
    if let Err(err) = std::fs::create_dir_all(&path) {
        if err.kind() != std::io::ErrorKind::AlreadyExists {
            return Err(err);
        }
    }
    Ok(())
}
/// Attempts to remove a file. Returns `Ok` if it didn't exist.
pub fn try_remove_file(path: impl AsRef<std::path::Path>) -> Result<(), std::io::Error> {
    // A missing file means there's nothing to remove, so treat it as success;
    // propagate every other error.
    match std::fs::remove_file(&path) {
        Err(err) if err.kind() != std::io::ErrorKind::NotFound => Err(err),
        _ => Ok(()),
    }
}
/// Calculates the hash of any single value
///
/// Uses [`DefaultHasher`], so results are deterministic within a build but
/// not guaranteed stable across rust versions.
pub fn hash_of<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}
/// Helper to read an array of bytes from a reader
pub trait ReadByteArray {
    /// Reads a byte array, `[u8; N]` from this reader
    fn read_byte_array<const N: usize>(&mut self) -> Result<[u8; N], std::io::Error>;
}

impl<R: ?Sized + std::io::Read> ReadByteArray for R {
    fn read_byte_array<const N: usize>(&mut self) -> Result<[u8; N], std::io::Error> {
        // Zero-initialize, then either fill the whole array or fail.
        let mut arr = [0u8; N];
        self.read_exact(arr.as_mut())?;
        Ok(arr)
    }
}
/// Helper for [`DisplayWrapper`] to create it out of a formatting string
///
/// Expands to a `DisplayWrapper` whose closure forwards the given format
/// arguments to `write!`.
// Note: Uses the unstable `decl_macro` feature (`pub macro`).
pub macro display_wrapper( $( $args:tt )* ) {
$crate::DisplayWrapper::new(|f| {
write!(f, $( $args )*)
})
}
/// Reads into a slice until eof.
///
/// Returns the remaining non-filled buffer.
// Note: Based on the `default_read_exact` function in `std`.
pub fn read_slice_until_eof<'a, R: io::Read + ?Sized>(
    reader: &mut R,
    mut buffer: &'a mut [u8],
) -> Result<&'a mut [u8], ReadSliceUntilEofError> {
    loop {
        let read = match reader.read(buffer) {
            Ok(read) => read,
            // Retry on interruption, fail on everything else
            Err(err) if err.kind() == io::ErrorKind::Interrupted => continue,
            Err(err) => return Err(ReadSliceUntilEofError::Io(err)),
        };

        // Eof: whatever wasn't filled is handed back to the caller
        if read == 0 {
            return Ok(buffer);
        }

        // Advance past the bytes we just read.
        // Note: `None` can only happen if the reader claims to have read more
        // bytes than the buffer holds, i.e. a misbehaving reader.
        buffer = match buffer.get_mut(read..) {
            Some(rest) => rest,
            None => return Err(ReadSliceUntilEofError::FilledBuffer),
        };
    }
}
/// Error for [`read_slice_until_eof`]
#[derive(Debug, thiserror::Error)]
pub enum ReadSliceUntilEofError {
    /// Io error from the underlying reader.
    // Note: `transparent` forwards both `Display` and `source` to the inner error.
    #[error(transparent)]
    Io(io::Error),

    /// Filled the whole buffer before eof.
    FilledBuffer,
}
/// Sign extends an `n`-bit unsigned value to a `u128`.
///
/// The low `n` bits of `value` are interpreted as a two's complement signed
/// integer: if the most significant of those bits (bit `n - 1`) is set, all
/// bits above it are set in the result, otherwise they are cleared.
///
/// # Panics
/// Panics if `n` is `0` or greater than `128`. (Previously these inputs
/// overflowed the shift: a panic in debug builds, a meaningless value in
/// release builds due to shift-amount masking.)
pub fn sign_extend_un(value: u128, n: usize) -> u128 {
    assert!((1..=128).contains(&n), "Cannot sign-extend a `u{n}`");

    // Shift left so the msb of the `n`-bit value lands on the msb of the
    // `u128`, then arithmetically shift back: the sign bit fills the top bits.
    // Note: this also discards any stray bits of `value` at or above bit `n`.
    let shift = 128 - n;
    (((value << shift) as i128) >> shift) as u128
}

View File

@ -1,52 +0,0 @@
//! Locking with poison
// Imports
use std::sync::{Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard};
/// Helper trait for locking `Mutex`s without handling poisoning
pub trait MutexPoison {
    /// Guard type
    type Guard;

    /// Locks this mutex, panicking if poisoned
    fn lock_unwrap(self) -> Self::Guard;
}

impl<'mutex, T> MutexPoison for &'mutex Mutex<T> {
    type Guard = MutexGuard<'mutex, T>;

    #[track_caller]
    fn lock_unwrap(self) -> Self::Guard {
        self.lock().expect("Poisoned")
    }
}
/// Helper trait for locking `RwLock`s without handling poisoning
pub trait RwLockPoison {
    /// Read guard type
    type ReadGuard;

    /// Write guard type
    type WriteGuard;

    /// Locks this rwlock for reading, panicking if poisoned
    fn read_unwrap(self) -> Self::ReadGuard;

    /// Locks this rwlock for writing, panicking if poisoned
    fn write_unwrap(self) -> Self::WriteGuard;
}

impl<'lock, T> RwLockPoison for &'lock RwLock<T> {
    type ReadGuard = RwLockReadGuard<'lock, T>;
    type WriteGuard = RwLockWriteGuard<'lock, T>;

    #[track_caller]
    fn read_unwrap(self) -> Self::ReadGuard {
        self.read().expect("Poisoned")
    }

    #[track_caller]
    fn write_unwrap(self) -> Self::WriteGuard {
        self.write().expect("Poisoned")
    }
}

View File

@ -1,19 +0,0 @@
//! Boxing + Mapping
// Imports
use crate::ResultFamily;
/// Boxes a variant of `Result` and maps it
pub trait MapBoxResult: ResultFamily {
    /// Boxes the `Ok` variant and maps it via `f`
    fn box_map<T, F: FnOnce(Box<Self::Ok>) -> T>(self, f: F) -> Result<T, Self::Err> {
        match self.into() {
            Ok(ok) => Ok(f(Box::new(ok))),
            Err(err) => Err(err),
        }
    }

    /// Boxes the `Err` variant and maps it via `f`
    fn box_map_err<E, F: FnOnce(Box<Self::Err>) -> E>(self, f: F) -> Result<Self::Ok, E> {
        match self.into() {
            Ok(ok) => Ok(ok),
            Err(err) => Err(f(Box::new(err))),
        }
    }
}

impl<T, E> MapBoxResult for Result<T, E> {}

View File

@ -1,36 +0,0 @@
//! Next type from bytes
/// Parses some types from bytes
///
/// All multi-byte reads use *native* endianness.
pub trait NextFromBytes {
    /// Parses the next `u8` from bytes
    fn next_u8(&self) -> Option<u8>;

    /// Parses the next `u16` from bytes
    fn next_u16(&self) -> Option<u16>;

    /// Parses the next `u32` from bytes
    fn next_u32(&self) -> Option<u32>;
}

impl NextFromBytes for [u8] {
    fn next_u8(&self) -> Option<u8> {
        self.first().copied()
    }

    fn next_u16(&self) -> Option<u16> {
        if let [b0, b1, ..] = *self {
            Some(u16::from_ne_bytes([b0, b1]))
        } else {
            None
        }
    }

    fn next_u32(&self) -> Option<u32> {
        if let [b0, b1, b2, b3, ..] = *self {
            Some(u32::from_ne_bytes([b0, b1, b2, b3]))
        } else {
            None
        }
    }
}

View File

@ -1,63 +0,0 @@
//! Null-terminated ascii string helpers
// Modules
mod error;
// Exports
pub use error::ReadError;
// Imports
use {crate::AsciiStrArr, std::convert::TryInto};
/// Trait for reading null terminated ascii strings from a buffer
///
/// Implemented on `[u8; N + 1]` buffers: the extra byte leaves room for the
/// null terminator of an `N`-character string.
pub trait NullAsciiString<const N: usize> {
    /// Reads a null terminated ascii string from this buffer and returns it
    fn read_string(&self) -> Result<AsciiStrArr<N>, ReadError>;

    /// Writes `s` to this buffer, followed by a null terminator
    fn write_string(&mut self, s: &AsciiStrArr<N>);
}
// TODO: Get rid of this once we're able to use `{N + 1}`
/// Implements [`NullAsciiString<$N>`] for `[u8; $N + 1]`, for each given `$N`
macro_rules! impl_null_ascii_string {
    ($($N:expr),* $(,)?) => {
        $(
            impl NullAsciiString<$N> for [u8; $N + 1] {
                fn read_string(&self) -> Result<AsciiStrArr<$N>, ReadError> {
                    // Find the first null and trim the buffer until it
                    let buf = match self.iter().position(|&b| b == b'\0') {
                        // Note: `idx < len`, so this cannot panic
                        Some(idx) => &self[..idx],
                        None => return Err(ReadError::NoNull),
                    };

                    // Then convert it to the ascii string array
                    // Note: the terminator occupies at least one of the `N + 1`
                    // slots, so `buf` has at most `N` bytes and always fits.
                    Ok(ascii::AsciiStr::from_ascii(buf)
                        .map_err(ReadError::NotAscii)?
                        .try_into()
                        .expect("Null terminated `[u8; N+1]` didn't fit into `AsciiStringArr<N>`")
                    )
                }

                #[allow(unused_comparisons)] // With N = 0 this function does nothing
                fn write_string(&mut self, input: &AsciiStrArr<$N>) {
                    // Copy everything over and set the last byte to 0
                    // Note: No need to override the remaining bytes
                    // Note: `len <= N` and the buffer holds `N + 1` bytes,
                    // so neither access can panic.
                    let len = input.len();
                    self[..len].copy_from_slice(input.as_bytes());
                    self[len] = 0;
                }
            }
        )*
    }
}

#[rustfmt::skip]
impl_null_ascii_string!{
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
    10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
    20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
    30, 31, 32,
    1971
}

View File

@ -1,13 +0,0 @@
//! Errors
/// Error type for [`NullAsciiString::read_string`](super::NullAsciiString::read_string)
#[derive(PartialEq, Eq, Clone, Copy, Debug, thiserror::Error)]
pub enum ReadError {
    /// No null terminator was found in the buffer
    #[error("No null was found on the buffer")]
    NoNull,

    /// The buffer was not valid ascii
    #[error("The buffer did not contain valid Ascii")]
    NotAscii(#[source] ascii::AsAsciiStrError),
}

View File

@ -1,66 +0,0 @@
//! `Ok` or return a value
use std::ops::{ControlFlow, FromResidual, Try};
/// Extension trait to create a [`OkOrReturnResult`]
pub trait OkOrReturn: Try + Sized {
    /// Returns the output of this result, or returns `value`
    fn ok_or_return<Ret>(self, value: Ret) -> OkOrReturnResult<Self::Output, Ret> {
        self.ok_or_else_return(|_| value)
    }

    /// Returns the output of this result, or returns with the output of `f`
    fn ok_or_else_return<Ret, F: FnOnce(Self::Residual) -> Ret>(self, f: F) -> OkOrReturnResult<Self::Output, Ret>;
}

impl<T: Try> OkOrReturn for T {
    fn ok_or_else_return<Ret, F: FnOnce(Self::Residual) -> Ret>(self, to_ret: F) -> OkOrReturnResult<Self::Output, Ret> {
        // Branch the `Try` value: continue with the output, or map the
        // residual into the early-return value.
        match self.branch() {
            ControlFlow::Break(residual) => OkOrReturnResult::Ret(to_ret(residual)),
            ControlFlow::Continue(output) => OkOrReturnResult::Ok(output),
        }
    }
}
/// `Try` type for getting either a value out, or returning a value
///
/// Produced by [`OkOrReturn::ok_or_return`] and friends: applying `?` either
/// yields the `Ok` output or early-returns the `Ret` value.
pub enum OkOrReturnResult<T, Ret> {
    /// Successful
    Ok(T),

    /// Return
    Ret(Ret),
}

/// Residual for [`OkOrReturnResult`]
pub struct OkOrReturnResidual<Ret> {
    /// Return value
    ret: Ret,
}
impl<T, Ret> Try for OkOrReturnResult<T, Ret> {
    type Output = T;
    type Residual = OkOrReturnResidual<Ret>;

    fn from_output(output: Self::Output) -> Self {
        Self::Ok(output)
    }

    fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
        // `Ret` becomes the residual (triggering an early return at `?`),
        // while `Ok` continues normally with the output.
        match self {
            Self::Ret(ret) => ControlFlow::Break(OkOrReturnResidual { ret }),
            Self::Ok(output) => ControlFlow::Continue(output),
        }
    }
}
// Allows `?` on an `OkOrReturnResult<T, Ret>` inside a function returning
// `Ret` itself: the residual's value becomes the function's return value.
// NOTE(review): this is a blanket impl of a foreign trait on a bare type
// parameter, which normally falls afoul of the orphan / uncovered-parameter
// rules — confirm this actually compiles in the crate's configuration.
impl<Ret> FromResidual<OkOrReturnResidual<Ret>> for Ret {
    fn from_residual(residual: OkOrReturnResidual<Ret>) -> Self {
        residual.ret
    }
}

// Re-wraps the residual back into `Ret` so `?` also works inside functions
// returning another `OkOrReturnResult` with the same `Ret`.
impl<T, Ret> FromResidual<OkOrReturnResidual<Ret>> for OkOrReturnResult<T, Ret> {
    fn from_residual(residual: OkOrReturnResidual<Ret>) -> Self {
        OkOrReturnResult::Ret(residual.ret)
    }
}

View File

@ -1,34 +0,0 @@
//! Peekable iterators
/// Iterators which are peekable
///
/// Unlike [`std::iter::Peekable`], no element buffering is involved;
/// implementors are expected to be able to look ahead non-destructively.
pub trait PeekableIter: Iterator {
    /// Peeks the next element without advancing the iterator
    fn peek(&self) -> Option<Self::Item>;

    /// Consumes the next element if `f` returns true
    fn next_if(&mut self, f: impl FnOnce(Self::Item) -> bool) -> bool;

    /// Applies `f` to the next element, advancing this iterator only on success
    ///
    /// Returns `None` if the iterator is exhausted, else `f`'s result.
    // NOTE(review): uses the old `Try` API (`T::Ok` / `T::Error`), which
    // predates the current `Try`/`FromResidual` redesign.
    fn try_next<T: std::ops::Try>(&mut self, f: impl FnOnce(Self::Item) -> T) -> Option<Result<T::Ok, T::Error>>;
}
// Blanket impl: any cloneable iterator can look ahead by cloning itself.
impl<I: Iterator + Clone> PeekableIter for I {
    fn peek(&self) -> Option<Self::Item> {
        // Advance a clone, leaving `self` untouched
        self.clone().next()
    }

    fn next_if(&mut self, f: impl FnOnce(Self::Item) -> bool) -> bool {
        // `then_some` turns `f`'s bool into an `Option<()>`, which `try_next`
        // treats as success/failure
        matches!(self.try_next(move |value| f(value).then_some(())), Some(Ok(())))
    }

    fn try_next<T: std::ops::Try>(&mut self, f: impl FnOnce(Self::Item) -> T) -> Option<Result<T::Ok, T::Error>> {
        // Advance a clone first, and only commit it to `self` on success
        // NOTE(review): `into_result` is from the old `Try` API.
        let mut iter = self.clone();
        match iter.next().map(f)?.into_result() {
            Ok(value) => {
                *self = iter;
                Some(Ok(value))
            },
            Err(err) => Some(Err(err)),
        }
    }
}

View File

@ -1,54 +0,0 @@
//! Signed hexadecimal formatting
// TODO: Improve this module overall.
// Imports
use {int_conv::Extended, ref_cast::RefCast, std::fmt};
/// A signed numeric type that uses signed hexadecimal formatting.
///
/// Formats as a sign followed by the hex of the absolute value (e.g. `-ff`),
/// rather than the two's complement hex that `{:x}` produces for negatives.
// Note: `repr(transparent)` + `RefCast` let `&T` be viewed as `&SignedHex<T>`.
#[derive(ref_cast::RefCast)]
#[repr(transparent)]
pub struct SignedHex<T>(pub T);
// All references implement it for their underlying type.
impl<T> fmt::Display for SignedHex<&T>
where
    SignedHex<T>: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `ref_cast` converts `&T` to `&SignedHex<T>` for free, which is
        // sound because `SignedHex` is `#[repr(transparent)]`.
        <SignedHex<T> as fmt::Display>::fmt(SignedHex::<T>::ref_cast(self.0), f)
    }
}
/// Macro to help implement [`SignedHex`]
///
/// Each `$T => $TBigger` pair implements `Display` for `SignedHex<$T>`,
/// widening to `$TBigger` before `abs` so that `$T::MIN` cannot overflow.
macro_rules! impl_signed_hex {
    ($($T:ty => $TBigger:ty),* $(,)?) => {
        $(
            impl fmt::Display for SignedHex<$T> {
                #[allow(clippy::default_numeric_fallback)] // We want inference to take care of the `0` here
                fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                    // Emit the sign first: `-` for negatives, `+` only if requested
                    let sign = match (self.0 < 0, f.sign_plus()) {
                        (true, _) => "-",
                        (false, true) => "+",
                        _ => "",
                    };
                    f.write_str(sign)?;

                    // Note: `{:+}` currently panics (after the sign is written)
                    if f.sign_plus() {
                        todo!("Signed hex does not support + flag yet");
                    };

                    // TODO: Remove `+` from the formatter flags when we do
                    // this to fully support the `+` flag.
                    fmt::LowerHex::fmt(&self.0.extended::<$TBigger>().abs(), f)
                }
            }
        )*
    }
}

#[rustfmt::skip]
impl_signed_hex! {
    i8 => i16,
    i16 => i32,
    i32 => i64,
    i64 => i128,
}

View File

@ -1,22 +0,0 @@
//! String contains with case insensitivity
/// Helper trait for `contains_case_insensitive`
pub trait StrContainsCaseInsensitive {
    /// Checks if string `pattern` is contained in `self` without
    /// checking for (ascii) case
    fn contains_case_insensitive(&self, pattern: &str) -> bool;
}

impl StrContainsCaseInsensitive for str {
    fn contains_case_insensitive(&self, pattern: &str) -> bool {
        let mut haystack = self;
        loop {
            // Once the pattern no longer fits, it cannot match
            if haystack.len() < pattern.len() {
                return false;
            }

            // Compare the prefix, if one exists at this byte length.
            // Note: `get` returns `None` when `pattern.len()` falls in the
            // middle of a multi-byte character; no match can start here then
            // (ascii case folding never changes non-ascii bytes, so a match
            // would imply a valid char boundary), so we just advance.
            if let Some(prefix) = haystack.get(..pattern.len()) {
                if prefix.eq_ignore_ascii_case(pattern) {
                    return true;
                }
            }

            // Advance by one *character*, not one byte, so the slice stays on
            // a char boundary (the previous implementation sliced `[1..]` and
            // panicked on multi-byte characters).
            let mut chars = haystack.chars();
            chars.next();
            haystack = chars.as_str();
        }
    }
}

View File

@ -1,70 +0,0 @@
//! Tasks
// Imports
use std::{
future::Future,
pin::Pin,
sync::{Arc, Mutex},
task::Poll,
thread,
};
/// Spawns a task and returns a future for awaiting it's value
pub fn spawn<T: Send + 'static>(f: impl FnOnce() -> T + Send + 'static) -> ValueFuture<T> {
    // Slot shared between the worker thread and the returned future
    let slot = Arc::new(Mutex::new(None));

    // Run `f` on its own thread, storing its result into the slot
    // TODO: If not null, maybe use a threadpool for small tasks?
    let thread_slot = Arc::clone(&slot);
    thread::spawn(move || {
        let value = f();
        *thread_slot.lock().expect("Poisoned") = Some(value);
    });

    ValueFuture {
        value: slot,
        exhausted: false,
    }
}
/// Value future
///
/// Resolves once the associated task has stored its value.
pub struct ValueFuture<T> {
    /// Underlying value
    value: Arc<Mutex<Option<T>>>,

    /// If the value was already retrieved
    exhausted: bool,
}

impl<T> ValueFuture<T> {
    /// Returns the value if finished
    pub fn get(&mut self) -> Option<T> {
        // Poll ourselves once with a no-op waker
        let mut cx = std::task::Context::from_waker(futures::task::noop_waker_ref());
        if let Poll::Ready(value) = Pin::new(self).poll(&mut cx) {
            Some(value)
        } else {
            None
        }
    }
}

impl<T> Future for ValueFuture<T> {
    type Output = T;

    fn poll(mut self: Pin<&mut Self>, _cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
        let this = &mut *self;

        // If we already retrieved the value, panic
        assert!(!this.exhausted, "Cannot call `poll` on an exhausted future");

        // Take the value out of the slot, if the task has stored it yet
        match this.value.lock().expect("Poisoned").take() {
            Some(value) => {
                this.exhausted = true;
                Poll::Ready(value)
            },
            None => Poll::Pending,
        }
    }
}

View File

@ -1,22 +0,0 @@
//! Try into as
// Imports
use std::convert::TryInto;
/// Helper for [`TryInto`] to use turbofish
pub trait TryIntoAs: Sized {
    /// Tries to convert `Self` into `T` using `TryInto`
    /// with type annotations.
    fn try_into_as<T>(self) -> Result<T, Self::Error>
    where
        Self: TryInto<T>;
}

// Blanket impl: every type gets the turbofish-friendly method.
impl<S> TryIntoAs for S {
    fn try_into_as<T>(self) -> Result<T, <Self as TryInto<T>>::Error>
    where
        Self: TryInto<T>,
    {
        TryInto::try_into(self)
    }
}

View File

@ -1,35 +0,0 @@
//! [`Try`] types or [`()`] trait
// Imports
use std::ops::{ControlFlow, Try};
/// Trait implemented for all types except [`()`]
// Note: auto trait plus a negative impl for `()`: every other type gets the
// trait automatically (nightly `auto_traits` / `negative_impls`).
pub auto trait NotEmpty {}
impl !NotEmpty for () {}
/// Trait implemented by either `Try<Output = ()>` types or `()`
pub trait TryOrEmpty {
    /// Try type
    type Try: Try<Output = ()>;

    /// Converts this type into the try type
    fn into_try(self) -> Self::Try;
}

// A unit-output `Try` type is already its own try type.
// Note: the `NotEmpty` bound keeps this blanket impl from overlapping with
// the `()` impl below.
impl<T: Try<Output = ()> + NotEmpty> TryOrEmpty for T {
    type Try = T;

    fn into_try(self) -> Self::Try {
        self
    }
}

// `()` converts to an infallible `ControlFlow` (`!` as the break type).
impl TryOrEmpty for () {
    type Try = ControlFlow<!>;

    fn into_try(self) -> Self::Try {
        ControlFlow::Continue(self)
    }
}

View File

@ -1,9 +0,0 @@
//! Void
/// Void a value explicitly
///
/// `value.void()` documents intent better than `let _ = value;`.
pub trait Void: Sized {
    /// Discards this value, doing nothing with it
    fn void(self);
}

impl<T> Void for T {
    fn void(self) {}
}

View File

@ -1,58 +0,0 @@
//! Writer adaptor for limiting bytes written
// Imports
use std::io::{self, Write};
/// Writer adaptor for limiting bytes written.
///
/// Once `limit` bytes have been written, further writes report `Ok(0)`.
#[derive(Debug)]
pub struct WriteTake<T> {
    /// Inner value
    inner: T,

    /// Limit
    limit: u64,
}

impl<T> WriteTake<T> {
    /// Creates a new adaptor
    pub const fn new(inner: T, limit: u64) -> Self {
        Self { inner, limit }
    }

    /// Consumes the adaptor and returns the inner writer
    #[allow(clippy::missing_const_for_fn)] // False positive
    pub fn into_inner(self) -> T {
        self.inner
    }
}

impl<T: Write> Write for WriteTake<T> {
    #[allow(clippy::as_conversions, clippy::cast_possible_truncation)] // TODO: Check if this needs any care?
    fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> {
        // Once the budget is exhausted, report "no more bytes accepted"
        if self.limit == 0 {
            return Ok(0);
        }

        // Otherwise clamp the write to the remaining budget
        let allowed = self.limit.min(buf.len() as u64) as usize;
        let written = self.inner.write(&buf[..allowed])?;
        self.limit -= written as u64;
        Ok(written)
    }

    fn flush(&mut self) -> Result<(), io::Error> {
        // Flushing is not limited, just forward it
        self.inner.flush()
    }
}

/// Extension trait for [`WriteTake`]
pub trait WriteTakeExt: Sized {
    /// Creates a [`WriteTake`] to limit the number of bytes written
    fn take(self, limit: u64) -> WriteTake<Self> {
        WriteTake::new(self, limit)
    }
}

impl<W: Write> WriteTakeExt for W {}