builtin/test: refactor the Token enum to be more granular (#10357)

* builtin/test: Split Token enum into 2-level hierarchy

* builtin/test: Rearrange the Token enum hierarchy

* builtin/test: Separate Token into Unary and Binary

* builtin/test: import IsOkAnd polyfill

* builtin/test: Rename enum variants one more time
This commit is contained in:
Andrew Neth 2024-03-15 23:24:44 -05:00 committed by GitHub
parent 360342bb9e
commit 08220c2189
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -4,6 +4,8 @@ use crate::common;
mod test_expressions {
use super::*;
#[allow(unused_imports)]
use crate::future::IsOkAnd;
use crate::nix::isatty;
use crate::wutil::{
file_id_for_path, fish_wcswidth, lwstat, waccess, wcstod::wcstod, wcstoi_opts, wstat,
@ -15,54 +17,128 @@ mod test_expressions {
#[derive(Copy, Clone, PartialEq, Eq)]
pub(super) enum Token {
unknown, // arbitrary string
Unknown, // Arbitrary string
Unary(UnaryToken), // Takes one string/file
Binary(BinaryToken), // Takes two strings/files/numbers
UnaryBoolean(UnaryBooleanToken), // Unary truth function
BinaryBoolean(Combiner), // Binary truth function
ParenOpen, // (
ParenClose, // )
}
bang, // "!", inverts sense
impl From<BinaryToken> for Token {
fn from(value: BinaryToken) -> Self {
Self::Binary(value)
}
}
filetype_b, // "-b", for block special files
filetype_c, // "-c", for character special files
filetype_d, // "-d", for directories
filetype_e, // "-e", for files that exist
filetype_f, // "-f", for regular files
filetype_G, // "-G", for check effective group id
filetype_g, // "-g", for set-group-id
filetype_h, // "-h", for symbolic links
filetype_k, // "-k", for sticky bit
filetype_L, // "-L", same as -h
filetype_O, // "-O", for check effective user id
filetype_p, // "-p", for FIFO
filetype_S, // "-S", socket
impl From<UnaryToken> for Token {
fn from(value: UnaryToken) -> Self {
Self::Unary(value)
}
}
filesize_s, // "-s", size greater than zero
#[derive(Copy, Clone, PartialEq, Eq)]
pub(super) enum UnaryBooleanToken {
Bang, // "!", inverts sense
}
filedesc_t, // "-t", whether the fd is associated with a terminal
#[derive(Copy, Clone, PartialEq, Eq)]
pub(super) enum Combiner {
And, // "-a", true if left and right are both true
Or, // "-o", true if either left or right is true
}
fileperm_r, // "-r", read permission
fileperm_u, // "-u", whether file is setuid
fileperm_w, // "-w", whether file write permission is allowed
fileperm_x, // "-x", whether file execute/search is allowed
macro_rules! define_token {
(
enum $enum:ident;
$(
$variant:ident($sub_type:ident) {
$($sub_variant:ident)+
}
)*
) => {
#[derive(Copy, Clone, PartialEq, Eq)]
pub(super) enum $enum {
$($variant($sub_type),)*
}
string_n, // "-n", non-empty string
string_z, // "-z", true if length of string is 0
string_equal, // "=", true if strings are identical
string_not_equal, // "!=", true if strings are not identical
$(
#[derive(Copy, Clone, PartialEq, Eq)]
pub(super) enum $sub_type { $($sub_variant,)+ }
file_newer, // f1 -nt f2, true if f1 exists and is newer than f2, or there is no f2
file_older, // f1 -ot f2, true if f2 exists and f1 does not, or f1 is older than f2
file_same, // f1 -ef f2, true if f1 and f2 exist and refer to same file
impl From<$sub_type> for Token {
fn from(value: $sub_type) -> Token {
$enum::$variant(value).into()
}
}
)*
};
}
number_equal, // "-eq", true if numbers are equal
number_not_equal, // "-ne", true if numbers are not equal
number_greater, // "-gt", true if first number is larger than second
number_greater_equal, // "-ge", true if first number is at least second
number_lesser, // "-lt", true if first number is smaller than second
number_lesser_equal, // "-le", true if first number is at most second
define_token! {
enum UnaryToken;
combine_and, // "-a", true if left and right are both true
combine_or, // "-o", true if either left or right is true
// based on stat()
FileStat(StatPredicate) {
b // "-b", for block special files
c // "-c", for character special files
d // "-d", for directories
e // "-e", for files that exist
f // "-f", for regular files
G // "-G", for check effective group id
g // "-g", for set-group-id
k // "-k", for sticky bit
O // "-O", for check effective user id
p // "-p", for FIFO
S // "-S", socket
s // "-s", size greater than zero
u // "-u", whether file is setuid
}
paren_open, // "(", open paren
paren_close, // ")", close paren
// based on access()
FilePerm(FilePermission) {
r // "-r", read permission
w // "-w", whether file write permission is allowed
x // "-x", whether file execute/search is allowed
}
// miscellaneous
FileType(FilePredicate) {
h // "-h", for symbolic links
L // "-L", same as -h
t // "-t", whether the fd is associated with a terminal
}
String(StringPredicate) {
n // "-n", non-empty string
z // "-z", true if length of string is 0
}
}
define_token! {
enum BinaryToken;
// based on inode + more distinguishing info (see FileId struct)
FileId(FileComparison) {
Newer // f1 -nt f2, true if f1 exists and is newer than f2, or there is no f2
Older // f1 -ot f2, true if f2 exists and f1 does not, or f1 is older than f2
Same // f1 -ef f2, true if f1 and f2 exist and refer to same file
}
String(StringComparison) {
Equal // "=", true if strings are identical
NotEqual // "!=", true if strings are not identical
}
Number(NumberComparison) {
Equal // "-eq", true if numbers are equal
NotEqual // "-ne", true if numbers are not equal
Greater // "-gt", true if first number is larger than second
GreaterEqual // "-ge", true if first number is at least second
Lesser // "-lt", true if first number is smaller than second
LesserEqual // "-le", true if first number is at most second
}
}
/// Our number type. We support both doubles and long longs. We have to support these separately
@ -103,71 +179,50 @@ mod test_expressions {
}
}
const UNARY_PRIMARY: u32 = 1 << 0;
const BINARY_PRIMARY: u32 = 1 << 1;
struct TokenInfo {
tok: Token,
flags: u32,
fn token_for_string(str: &wstr) -> Token {
TOKEN_INFOS.get(str).copied().unwrap_or(Token::Unknown)
}
impl TokenInfo {
fn new(tok: Token, flags: u32) -> Self {
Self { tok, flags }
}
}
fn token_for_string(str: &wstr) -> &'static TokenInfo {
if let Some(res) = TOKEN_INFOS.get(str) {
res
} else {
TOKEN_INFOS
.get(L!(""))
.expect("Should have token for empty string")
}
}
static TOKEN_INFOS: Lazy<HashMap<&'static wstr, TokenInfo>> = Lazy::new(|| {
#[rustfmt::skip]
static TOKEN_INFOS: Lazy<HashMap<&'static wstr, Token>> = Lazy::new(|| {
let pairs = [
(L!(""), TokenInfo::new(Token::unknown, 0)),
(L!("!"), TokenInfo::new(Token::bang, 0)),
(L!("-b"), TokenInfo::new(Token::filetype_b, UNARY_PRIMARY)),
(L!("-c"), TokenInfo::new(Token::filetype_c, UNARY_PRIMARY)),
(L!("-d"), TokenInfo::new(Token::filetype_d, UNARY_PRIMARY)),
(L!("-e"), TokenInfo::new(Token::filetype_e, UNARY_PRIMARY)),
(L!("-f"), TokenInfo::new(Token::filetype_f, UNARY_PRIMARY)),
(L!("-G"), TokenInfo::new(Token::filetype_G, UNARY_PRIMARY)),
(L!("-g"), TokenInfo::new(Token::filetype_g, UNARY_PRIMARY)),
(L!("-h"), TokenInfo::new(Token::filetype_h, UNARY_PRIMARY)),
(L!("-k"), TokenInfo::new(Token::filetype_k, UNARY_PRIMARY)),
(L!("-L"), TokenInfo::new(Token::filetype_L, UNARY_PRIMARY)),
(L!("-O"), TokenInfo::new(Token::filetype_O, UNARY_PRIMARY)),
(L!("-p"), TokenInfo::new(Token::filetype_p, UNARY_PRIMARY)),
(L!("-S"), TokenInfo::new(Token::filetype_S, UNARY_PRIMARY)),
(L!("-s"), TokenInfo::new(Token::filesize_s, UNARY_PRIMARY)),
(L!("-t"), TokenInfo::new(Token::filedesc_t, UNARY_PRIMARY)),
(L!("-r"), TokenInfo::new(Token::fileperm_r, UNARY_PRIMARY)),
(L!("-u"), TokenInfo::new(Token::fileperm_u, UNARY_PRIMARY)),
(L!("-w"), TokenInfo::new(Token::fileperm_w, UNARY_PRIMARY)),
(L!("-x"), TokenInfo::new(Token::fileperm_x, UNARY_PRIMARY)),
(L!("-n"), TokenInfo::new(Token::string_n, UNARY_PRIMARY)),
(L!("-z"), TokenInfo::new(Token::string_z, UNARY_PRIMARY)),
(L!("="), TokenInfo::new(Token::string_equal, BINARY_PRIMARY)),
(L!("!="), TokenInfo::new(Token::string_not_equal, BINARY_PRIMARY)),
(L!("-nt"), TokenInfo::new(Token::file_newer, BINARY_PRIMARY)),
(L!("-ot"), TokenInfo::new(Token::file_older, BINARY_PRIMARY)),
(L!("-ef"), TokenInfo::new(Token::file_same, BINARY_PRIMARY)),
(L!("-eq"), TokenInfo::new(Token::number_equal, BINARY_PRIMARY)),
(L!("-ne"), TokenInfo::new(Token::number_not_equal, BINARY_PRIMARY)),
(L!("-gt"), TokenInfo::new(Token::number_greater, BINARY_PRIMARY)),
(L!("-ge"), TokenInfo::new(Token::number_greater_equal, BINARY_PRIMARY)),
(L!("-lt"), TokenInfo::new(Token::number_lesser, BINARY_PRIMARY)),
(L!("-le"), TokenInfo::new(Token::number_lesser_equal, BINARY_PRIMARY)),
(L!("-a"), TokenInfo::new(Token::combine_and, 0)),
(L!("-o"), TokenInfo::new(Token::combine_or, 0)),
(L!("("), TokenInfo::new(Token::paren_open, 0)),
(L!(")"), TokenInfo::new(Token::paren_close, 0))
(L!(""), Token::Unknown),
(L!("!"), Token::UnaryBoolean(UnaryBooleanToken::Bang)),
(L!("-b"), StatPredicate::b.into()),
(L!("-c"), StatPredicate::c.into()),
(L!("-d"), StatPredicate::d.into()),
(L!("-e"), StatPredicate::e.into()),
(L!("-f"), StatPredicate::f.into()),
(L!("-G"), StatPredicate::G.into()),
(L!("-g"), StatPredicate::g.into()),
(L!("-h"), FilePredicate::h.into()),
(L!("-k"), StatPredicate::k.into()),
(L!("-L"), FilePredicate::L.into()),
(L!("-O"), StatPredicate::O.into()),
(L!("-p"), StatPredicate::p.into()),
(L!("-S"), StatPredicate::S.into()),
(L!("-s"), StatPredicate::s.into()),
(L!("-t"), FilePredicate::t.into()),
(L!("-r"), FilePermission::r.into()),
(L!("-u"), StatPredicate::u.into()),
(L!("-w"), FilePermission::w.into()),
(L!("-x"), FilePermission::x.into()),
(L!("-n"), StringPredicate::n.into()),
(L!("-z"), StringPredicate::z.into()),
(L!("="), StringComparison::Equal.into()),
(L!("!="), StringComparison::NotEqual.into()),
(L!("-nt"), FileComparison::Newer.into()),
(L!("-ot"), FileComparison::Older.into()),
(L!("-ef"), FileComparison::Same.into()),
(L!("-eq"), NumberComparison::Equal.into()),
(L!("-ne"), NumberComparison::NotEqual.into()),
(L!("-gt"), NumberComparison::Greater.into()),
(L!("-ge"), NumberComparison::GreaterEqual.into()),
(L!("-lt"), NumberComparison::Lesser.into()),
(L!("-le"), NumberComparison::LesserEqual.into()),
(L!("-a"), Token::BinaryBoolean(Combiner::And)),
(L!("-o"), Token::BinaryBoolean(Combiner::Or)),
(L!("("), Token::ParenOpen),
(L!(")"), Token::ParenClose),
];
pairs.into_iter().collect()
});
@ -225,10 +280,16 @@ mod test_expressions {
}
}
/// Single argument like -n foo or "just a string".
/// Something that is not a token of any other type.
struct JustAString {
arg: WString,
range: Range,
}
/// Single argument like -n foo.
struct UnaryPrimary {
arg: WString,
token: Token,
token: UnaryToken,
range: Range,
}
@ -236,14 +297,14 @@ mod test_expressions {
struct BinaryPrimary {
arg_left: WString,
arg_right: WString,
token: Token,
token: BinaryToken,
range: Range,
}
/// Unary operator like bang.
struct UnaryOperator {
subject: Box<dyn Expression>,
token: Token,
token: UnaryBooleanToken,
range: Range,
}
@ -251,8 +312,7 @@ mod test_expressions {
/// we don't have to worry about precedence in the parser.
struct CombiningExpression {
subjects: Vec<Box<dyn Expression>>,
combiners: Vec<Token>,
token: Token,
combiners: Vec<Combiner>,
range: Range,
}
@ -262,6 +322,16 @@ mod test_expressions {
range: Range,
}
impl Expression for JustAString {
fn evaluate(&self, _streams: &mut IoStreams, _errors: &mut Vec<WString>) -> bool {
!self.arg.is_empty()
}
fn range(&self) -> Range {
self.range.clone()
}
}
impl Expression for UnaryPrimary {
fn evaluate(&self, streams: &mut IoStreams, errors: &mut Vec<WString>) -> bool {
unary_primary_evaluate(self.token, &self.arg, streams, errors)
@ -284,11 +354,8 @@ mod test_expressions {
impl Expression for UnaryOperator {
fn evaluate(&self, streams: &mut IoStreams, errors: &mut Vec<WString>) -> bool {
if self.token == Token::bang {
!self.subject.evaluate(streams, errors)
} else {
errors.push(L!("Unknown token type in unary_operator_evaluate").to_owned());
false
match self.token {
UnaryBooleanToken::Bang => !self.subject.evaluate(streams, errors),
}
}
@ -300,49 +367,45 @@ mod test_expressions {
impl Expression for CombiningExpression {
fn evaluate(&self, streams: &mut IoStreams, errors: &mut Vec<WString>) -> bool {
let _res = self.subjects[0].evaluate(streams, errors);
if self.token == Token::combine_and || self.token == Token::combine_or {
assert!(!self.subjects.is_empty());
assert!(self.combiners.len() + 1 == self.subjects.len());
assert!(!self.subjects.is_empty());
assert!(self.combiners.len() + 1 == self.subjects.len());
// One-element case.
if self.subjects.len() == 1 {
return self.subjects[0].evaluate(streams, errors);
// One-element case.
if self.subjects.len() == 1 {
return self.subjects[0].evaluate(streams, errors);
}
// Evaluate our lists, remembering that AND has higher precedence than OR. We can
// visualize this as a sequence of OR expressions of AND expressions.
let mut idx = 0;
let max = self.subjects.len();
let mut or_result = false;
while idx < max {
if or_result {
// short circuit
break;
}
// Evaluate our lists, remembering that AND has higher precedence than OR. We can
// visualize this as a sequence of OR expressions of AND expressions.
let mut idx = 0;
let max = self.subjects.len();
let mut or_result = false;
// Evaluate a stream of AND starting at given subject index. It may only have one
// element.
let mut and_result = true;
while idx < max {
if or_result {
// short circuit
// Evaluate it, short-circuiting.
and_result = and_result && self.subjects[idx].evaluate(streams, errors);
// If the combiner at this index (which corresponds to how we combine with the
// next subject) is not AND, then exit the loop.
if idx + 1 < max && self.combiners[idx] != Combiner::And {
idx += 1;
break;
}
// Evaluate a stream of AND starting at given subject index. It may only have one
// element.
let mut and_result = true;
while idx < max {
// Evaluate it, short-circuiting.
and_result = and_result && self.subjects[idx].evaluate(streams, errors);
// If the combiner at this index (which corresponds to how we combine with the
// next subject) is not AND, then exit the loop.
if idx + 1 < max && self.combiners[idx] != Token::combine_and {
idx += 1;
break;
}
idx += 1;
}
// OR it in.
or_result = or_result || and_result;
idx += 1;
}
return or_result;
// OR it in.
or_result = or_result || and_result;
}
errors.push(L!("Unknown token type in CombiningExpression.evaluate").to_owned());
false
return or_result;
}
fn range(&self) -> Range {
@ -374,19 +437,15 @@ mod test_expressions {
if start >= end {
return self.error(start, sprintf!("Missing argument at index %u", start + 1));
}
let tok = token_for_string(self.arg(start)).tok;
if tok == Token::bang {
let subject = self.parse_unary_expression(start + 1, end);
if let Some(subject) = subject {
let range = start..subject.range().end;
return UnaryOperator {
subject,
token: tok,
range,
}
.into_some_box();
if let Token::UnaryBoolean(token) = token_for_string(self.arg(start)) {
let subject = self.parse_unary_expression(start + 1, end)?;
let range = start..subject.range().end;
return UnaryOperator {
subject,
token,
range,
}
return None;
.into_some_box();
}
self.parse_primary(start, end)
}
@ -407,8 +466,7 @@ mod test_expressions {
while idx < end {
if !first {
// This is not the first expression, so we expect a combiner.
let combiner = token_for_string(self.arg(idx)).tok;
if combiner != Token::combine_and && combiner != Token::combine_or {
let Token::BinaryBoolean(combiner) = token_for_string(self.arg(idx)) else {
/* Not a combiner, we're done */
self.errors.insert(
0,
@ -419,7 +477,7 @@ mod test_expressions {
);
self.error_idx = idx;
break;
}
};
combiners.push(combiner);
idx += 1;
}
@ -448,7 +506,6 @@ mod test_expressions {
CombiningExpression {
subjects,
combiners,
token: Token::combine_and,
range: start..idx,
}
.into_some_box()
@ -467,40 +524,36 @@ mod test_expressions {
}
// All our unary primaries are prefix, so the operator is at start.
let info: &TokenInfo = token_for_string(self.arg(start));
if info.flags & UNARY_PRIMARY == 0 {
let Token::Unary(token) = token_for_string(self.arg(start)) else {
return None;
}
};
UnaryPrimary {
arg: self.arg(start + 1).to_owned(),
token: info.tok,
token,
range: start..start + 2,
}
.into_some_box()
}
fn parse_just_a_string(&mut self, start: usize, end: usize) -> Option<Box<dyn Expression>> {
// Handle a string as a unary primary that is not a token of any other type. e.g. 'test foo -a
// bar' should evaluate to true. We handle this with a unary primary of test_string_n.
// Handle a string as a unary primary that is not a token of any other type.
// e.g. 'test foo -a bar' should evaluate to true.
// We need one argument.
if start >= end {
return self.error(start, sprintf!("Missing argument at index %u", start + 1));
}
let info = token_for_string(self.arg(start));
if info.tok != Token::unknown {
let tok = token_for_string(self.arg(start));
if tok != Token::Unknown {
return self.error(
start,
sprintf!("Unexpected argument type at index %u", start + 1),
);
}
// This is hackish; a nicer way to implement this would be with a "just a string" expression
// type.
return UnaryPrimary {
return JustAString {
arg: self.arg(start).to_owned(),
token: Token::string_n,
range: start..start + 1,
}
.into_some_box();
@ -519,14 +572,13 @@ mod test_expressions {
}
// All our binary primaries are infix, so the operator is at start + 1.
let info = token_for_string(self.arg(start + 1));
if info.flags & BINARY_PRIMARY == 0 {
let Token::Binary(token) = token_for_string(self.arg(start + 1)) else {
return None;
}
};
BinaryPrimary {
arg_left: self.arg(start).to_owned(),
arg_right: self.arg(start + 2).to_owned(),
token: info.tok,
token,
range: start..start + 3,
}
.into_some_box()
@ -539,8 +591,7 @@ mod test_expressions {
}
// Must start with an open expression.
let open_paren = token_for_string(self.arg(start));
if open_paren.tok != Token::paren_open {
if token_for_string(self.arg(start)) != Token::ParenOpen {
return None;
}
@ -556,8 +607,7 @@ mod test_expressions {
sprintf!("Missing close paren at index %u", close_index + 1),
);
}
let close_paren = token_for_string(self.arg(close_index));
if close_paren.tok != Token::paren_close {
if token_for_string(self.arg(close_index)) != Token::ParenClose {
return self.error(
close_index,
sprintf!("Expected close paren at index %u", close_index + 1),
@ -599,31 +649,24 @@ mod test_expressions {
end: usize,
) -> Option<Box<dyn Expression>> {
assert!(end - start == 3);
let mut result = None;
let center_token = token_for_string(self.arg(start + 1));
if center_token.flags & BINARY_PRIMARY != 0 {
result = self.parse_binary_primary(start, end);
} else if center_token.tok == Token::combine_and
|| center_token.tok == Token::combine_or
{
let left = self.parse_unary_expression(start, start + 1);
let right = self.parse_unary_expression(start + 2, start + 3);
if left.is_some() && right.is_some() {
// Transfer ownership to the vector of subjects.
let combiners = vec![center_token.tok];
let subjects = vec![left.unwrap(), right.unwrap()];
result = CombiningExpression {
subjects,
combiners,
token: center_token.tok,
range: start..end,
}
.into_some_box()
if matches!(center_token, Token::Binary(_)) {
self.parse_binary_primary(start, end)
} else if let Token::BinaryBoolean(combiner) = center_token {
let left = self.parse_unary_expression(start, start + 1)?;
let right = self.parse_unary_expression(start + 2, start + 3)?;
// Transfer ownership to the vector of subjects.
CombiningExpression {
subjects: vec![left, right],
combiners: vec![combiner],
range: start..end,
}
.into_some_box()
} else {
result = self.parse_unary_expression(start, end);
self.parse_unary_expression(start, end)
}
result
}
fn parse_4_arg_expression(
@ -632,24 +675,22 @@ mod test_expressions {
end: usize,
) -> Option<Box<dyn Expression>> {
assert!(end - start == 4);
let mut result = None;
let first_token = token_for_string(self.arg(start)).tok;
if first_token == Token::bang {
let subject = self.parse_3_arg_expression(start + 1, end);
if let Some(subject) = subject {
result = UnaryOperator {
subject,
token: first_token,
range: start..end,
}
.into_some_box();
let first_token = token_for_string(self.arg(start));
if let Token::UnaryBoolean(token) = first_token {
let subject = self.parse_3_arg_expression(start + 1, end)?;
UnaryOperator {
subject,
token,
range: start..end,
}
} else if first_token == Token::paren_open {
result = self.parse_parenthetical(start, end);
.into_some_box()
} else if first_token == Token::ParenOpen {
self.parse_parenthetical(start, end)
} else {
result = self.parse_combining_expression(start, end);
self.parse_combining_expression(start, end)
}
result
}
fn parse_expression(&mut self, start: usize, end: usize) -> Option<Box<dyn Expression>> {
@ -826,168 +867,118 @@ mod test_expressions {
}
fn binary_primary_evaluate(
token: Token,
token: BinaryToken,
left: &wstr,
right: &wstr,
errors: &mut Vec<WString>,
) -> bool {
let mut ln = Number::default();
let mut rn = Number::default();
match token {
Token::string_equal => left == right,
Token::string_not_equal => left != right,
Token::file_newer => file_id_for_path(right).older_than(&file_id_for_path(left)),
Token::file_older => file_id_for_path(left).older_than(&file_id_for_path(right)),
Token::file_same => file_id_for_path(left) == file_id_for_path(right),
Token::number_equal => {
parse_number(left, &mut ln, errors)
&& parse_number(right, &mut rn, errors)
&& ln == rn
BinaryToken::String(StringComparison::Equal) => left == right,
BinaryToken::String(StringComparison::NotEqual) => left != right,
BinaryToken::FileId(comparison) => {
let left = file_id_for_path(left);
let right = file_id_for_path(right);
match comparison {
FileComparison::Newer => right.older_than(&left),
FileComparison::Older => left.older_than(&right),
FileComparison::Same => left == right,
}
}
Token::number_not_equal => {
parse_number(left, &mut ln, errors)
&& parse_number(right, &mut rn, errors)
&& ln != rn
}
Token::number_greater => {
parse_number(left, &mut ln, errors)
&& parse_number(right, &mut rn, errors)
&& ln > rn
}
Token::number_greater_equal => {
parse_number(left, &mut ln, errors)
&& parse_number(right, &mut rn, errors)
&& ln >= rn
}
Token::number_lesser => {
parse_number(left, &mut ln, errors)
&& parse_number(right, &mut rn, errors)
&& ln < rn
}
Token::number_lesser_equal => {
parse_number(left, &mut ln, errors)
&& parse_number(right, &mut rn, errors)
&& ln <= rn
}
_ => {
errors.push(L!("Unknown token type in binary_primary_evaluate").to_owned());
false
BinaryToken::Number(comparison) => {
let mut ln = Number::default();
let mut rn = Number::default();
if !parse_number(left, &mut ln, errors) || !parse_number(right, &mut rn, errors) {
return false;
}
match comparison {
NumberComparison::Equal => ln == rn,
NumberComparison::NotEqual => ln != rn,
NumberComparison::Greater => ln > rn,
NumberComparison::GreaterEqual => ln >= rn,
NumberComparison::Lesser => ln < rn,
NumberComparison::LesserEqual => ln <= rn,
}
}
}
}
fn unary_primary_evaluate(
token: Token,
token: UnaryToken,
arg: &wstr,
streams: &mut IoStreams,
errors: &mut Vec<WString>,
) -> bool {
const S_ISGID: u32 = 0o2000;
const S_ISVTX: u32 = 0o1000;
// Helper to call wstat and then apply a function to the result.
fn stat_and<F>(arg: &wstr, f: F) -> bool
where
F: FnOnce(std::fs::Metadata) -> bool,
{
wstat(arg).map_or(false, f)
}
match token {
Token::filetype_b => {
// "-b", for block special files
stat_and(arg, |buf| buf.file_type().is_block_device())
#[allow(clippy::unnecessary_cast)] // mode_t is u32 on many platforms, but not all
UnaryToken::FileStat(stat_token) => {
let Ok(md) = wstat(arg) else {
return false;
};
const S_ISUID: u32 = libc::S_ISUID as u32;
const S_ISGID: u32 = libc::S_ISGID as u32;
const S_ISVTX: u32 = libc::S_ISVTX as u32;
match stat_token {
// "-b", for block special files
StatPredicate::b => md.file_type().is_block_device(),
// "-c", for character special files
StatPredicate::c => md.file_type().is_char_device(),
// "-d", for directories
StatPredicate::d => md.file_type().is_dir(),
// "-e", for files that exist
StatPredicate::e => true,
// "-f", for regular files
StatPredicate::f => md.file_type().is_file(),
// "-G", for check effective group id
StatPredicate::G => md.gid() == crate::nix::getegid(),
// "-g", for set-group-id
StatPredicate::g => md.permissions().mode() & S_ISGID != 0,
// "-k", for sticky bit
StatPredicate::k => md.permissions().mode() & S_ISVTX != 0,
// "-O", for check effective user id
StatPredicate::O => md.uid() == crate::nix::geteuid(),
// "-p", for FIFO
StatPredicate::p => md.file_type().is_fifo(),
// "-S", socket
StatPredicate::S => md.file_type().is_socket(),
// "-s", size greater than zero
StatPredicate::s => md.len() > 0,
// "-u", whether file is setuid
StatPredicate::u => md.permissions().mode() & S_ISUID != 0,
}
}
Token::filetype_c => {
// "-c", for character special files
stat_and(arg, |buf: std::fs::Metadata| {
buf.file_type().is_char_device()
})
UnaryToken::FileType(file_type) => {
match file_type {
// "-h", for symbolic links
// "-L", same as -h
FilePredicate::h | FilePredicate::L => {
lwstat(arg).is_ok_and(|md| md.file_type().is_symlink())
}
// "-t", whether the fd is associated with a terminal
FilePredicate::t => {
let mut num = Number::default();
parse_number(arg, &mut num, errors) && num.isatty(streams)
}
}
}
Token::filetype_d => {
// "-d", for directories
stat_and(arg, |buf: std::fs::Metadata| buf.file_type().is_dir())
UnaryToken::FilePerm(permission) => {
let mode = match permission {
// "-r", read permission
FilePermission::r => libc::R_OK,
// "-w", whether file write permission is allowed
FilePermission::w => libc::W_OK,
// "-x", whether file execute/search is allowed
FilePermission::x => libc::X_OK,
};
waccess(arg, mode) == 0
}
Token::filetype_e => {
// "-e", for files that exist
stat_and(arg, |_| true)
}
Token::filetype_f => {
// "-f", for regular files
stat_and(arg, |buf| buf.file_type().is_file())
}
Token::filetype_G => {
// "-G", for check effective group id
stat_and(arg, |buf| crate::nix::getegid() == buf.gid())
}
Token::filetype_g => {
// "-g", for set-group-id
stat_and(arg, |buf| buf.permissions().mode() & S_ISGID != 0)
}
Token::filetype_h | Token::filetype_L => {
// "-h", for symbolic links
// "-L", same as -h
lwstat(arg).map_or(false, |buf| buf.file_type().is_symlink())
}
Token::filetype_k => {
// "-k", for sticky bit
stat_and(arg, |buf| buf.permissions().mode() & S_ISVTX != 0)
}
Token::filetype_O => {
// "-O", for check effective user id
stat_and(arg, |buf: std::fs::Metadata| {
crate::nix::geteuid() == buf.uid()
})
}
Token::filetype_p => {
// "-p", for FIFO
stat_and(arg, |buf: std::fs::Metadata| buf.file_type().is_fifo())
}
Token::filetype_S => {
// "-S", socket
stat_and(arg, |buf| buf.file_type().is_socket())
}
Token::filesize_s => {
// "-s", size greater than zero
stat_and(arg, |buf| buf.len() > 0)
}
Token::filedesc_t => {
// "-t", whether the fd is associated with a terminal
let mut num = Number::default();
parse_number(arg, &mut num, errors) && num.isatty(streams)
}
Token::fileperm_r => {
// "-r", read permission
waccess(arg, libc::R_OK) == 0
}
Token::fileperm_u => {
// "-u", whether file is setuid
#[allow(clippy::unnecessary_cast)]
stat_and(arg, |buf| {
buf.permissions().mode() & (libc::S_ISUID as u32) != 0
})
}
Token::fileperm_w => {
// "-w", whether file write permission is allowed
waccess(arg, libc::W_OK) == 0
}
Token::fileperm_x => {
// "-x", whether file execute/search is allowed
waccess(arg, libc::X_OK) == 0
}
Token::string_n => {
UnaryToken::String(predicate) => match predicate {
// "-n", non-empty string
!arg.is_empty()
}
Token::string_z => {
StringPredicate::n => !arg.is_empty(),
// "-z", true if length of string is 0
arg.is_empty()
}
_ => {
// Unknown token.
errors.push(L!("Unknown token type in unary_primary_evaluate").to_owned());
false
}
StringPredicate::z => arg.is_empty(),
},
}
}
}