This fixes test failures seen on Haiku.
* tests/ls/recursive.sh: Run 'ls' even if ulimit fails.
* tests/split/r-chunk.sh: Run 'split' even if ulimit fails.
* tests/sort/sort-merge-fdlimit.sh: Skip the test if 'ulimit -n' cannot
lower the file descriptor limit.
Reported by Bruno Haible.
# Check that we don't run out of file descriptors when visiting
# directories recursively.
mkdir -p $(seq 30 | tr '\n' '/') || framework_failure_
-(ulimit -n 20 && ls -R 1 > out 2> err) || fail=1
+(ulimit -n 20; ls -R 1 > out 2> err) || fail=1
test $(wc -l < out) = 88 || fail=1
test $(wc -l < err) = 0 || fail=1
# the ATF but fail inside it.
# The default batch size (nmerge) is 16.
-(ulimit -n 19 \
+(ulimit -n 19 && touch ulimit-worked \
&& sort -m --batch-size=16 in/* 2>err/merge-default-err \
|| ! grep "open failed" err/merge-default-err) || fail=1
+test -f ulimit-worked || skip_ 'cannot modify open file descriptor limit'
+
# If sort opens a file to sort by random hashes of keys,
# it needs to consider this file against its limit on open file
# descriptors. Test once with the default random source
# Ensure we fall back to appending to a file at a time
# if we hit the limit for the number of open files.
rm x*
-(ulimit -n 20 && yes | head -n90 | split -n r/30 ) || fail=1
+(ulimit -n 20; yes | head -n90 | split -n r/30 ) || fail=1
test "$(stat -c %s x* | uniq -c | sed 's/^ *//; s/ /x/')" = "30x6" || fail=1
Exit $fail