#!/usr/bin/env bash
# Mirrored from thirdparty/systemd.git — coccinelle/run-coccinelle.sh
# (blob bb72a493f08f300d43355f17fc3b943298a7d326, via git.ipfire.org)
# SPDX-License-Identifier: LGPL-2.1-or-later

# Known Coccinelle limitations in this tree:
# - Coccinelle doesn't like our TEST() macros, which then causes name conflicts; i.e. Cocci can't process
#   that TEST(xsetxattr) yields test_xsetxattr() and uses just xsetxattr() in this case, which then conflicts
#   with the tested xsetxattr() function, leading up to the whole test case getting skipped due to
#   the conflicting definitions
# - something keeps pulling in src/boot/efi/*.h stuff, even though it's excluded
# - Coccinelle has issues with some of our more complex macros

# Exclude following paths from the Coccinelle transformations
EXCLUDED_PATHS=(
    # Symlinked to test-bus-vtable-cc.cc, which causes issues with the IN_SET macro
    "src/libsystemd/sd-bus/test-bus-vtable.c"
    "src/libsystemd/sd-journal/lookup3.c"
    # Ignore man examples, as they redefine some macros we use internally, which makes Coccinelle complain
    # and ignore code that tries to use the redefined stuff
    # NOTE(review): this entry was lost in the extraction; restored from the comment above — confirm.
    "man/*"
)
# Repository root, so the script works regardless of the caller's cwd.
TOP_DIR="$(git rev-parse --show-toplevel)"
# Coccinelle AST cache lives next to this script (it is large, ~15 GiB).
CACHE_DIR="$(dirname "$0")/.coccinelle-cache"

# Create an array from files tracked by git...
mapfile -t FILES < <(git ls-files ':/*.c')
# ...and filter everything that matches patterns from EXCLUDED_PATHS
for excl in "${EXCLUDED_PATHS[@]}"; do
    # Intentionally unquoted: the ${FILES[@]//$excl} substitution blanks out
    # matching entries and the re-split drops the resulting empty words.
    # shellcheck disable=SC2206
    FILES=(${FILES[@]//$excl})
done
# Bail out early if GNU parallel is not available (the spatch fan-out below
# depends on it). stderr is silenced so a missing binary doesn't print a
# confusing "command not found" before our own message.
if ! parallel -h >/dev/null 2>&1; then
    echo 'Please install GNU parallel (package "parallel")' >&2
    exit 1
fi
# Run the rule files given on the command line, or every *.cocci rule shipped
# in the repository when invoked without arguments. (Explicit if/else instead
# of `[[ ... ]] && a || b`, which mis-fires if the first branch fails — SC2015.)
if [[ $# -ne 0 ]]; then
    SCRIPTS=("$@")
else
    SCRIPTS=("$TOP_DIR"/coccinelle/*.cocci)
fi

echo "--x-- Using Coccinelle cache directory: $CACHE_DIR"
echo
echo "--x-- Note: running spatch for the first time without populated cache takes"
echo "--x-- a _long_ time (15-30 minutes). Also, the cache is quite large"
echo "--x-- (~15 GiB), so make sure you have enough free space."
echo
for script in "${SCRIPTS[@]}"; do
    echo "--x-- Processing $script --x--"
    # Capture spatch's stderr so it is shown only when the run fails.
    TMPFILE="$(mktemp)"
    # NOTE(review): ARGS is expected to be populated by option parsing earlier
    # in the script (not visible in this chunk) — confirm against the full file.
    echo "+ spatch --sp-file $script ${ARGS[*]} ..."
    # A couple of notes about the invocation below:
    #
    # 1) Limit this to 10 files at once, as processing the ASTs is _very_ memory hungry - e.g. with 20 files
    #    at once one spatch process can take around 2.5 GiB of RAM, which can easily eat up all available RAM
    #    when paired together with parallel
    #
    # 2) Make sure spatch can find our includes via -I <dir>, similarly as we do when compiling stuff
    #
    # 3) Make sure to include includes from includes (--recursive-includes), but use them only to get type
    #    definitions (--include-headers-for-types) - otherwise we'd start formating them as well, which might be
    #    unwanted, especially for includes we fetch verbatim from third-parties
    #
    # 4) Use cache, since generating the full AST is _very_ expensive, i.e. the uncached run takes 15 - 30
    #    minutes (for one rule(!)), vs 30 - 90 seconds when the cache is populated. One major downside of the
    #    cache is that it's quite big - ATTOW the cache takes around 15 GiB, but the performance boost is
    #    well worth it
    parallel --halt now,fail=1 --keep-order --noswap --max-args=10 \
        spatch --cache-prefix "$CACHE_DIR" \
               -I src \
               --sp-file "$script" \
               --recursive-includes \
               --include-headers-for-types \
               "${ARGS[@]}" ::: "${FILES[@]}" \
               2>"$TMPFILE" || cat "$TMPFILE"
    # Fix: don't leak one stderr-capture temp file per processed rule.
    rm -f "$TMPFILE"
    echo -e "--x-- Processed $script --x--\n"
done