]> git.ipfire.org Git - thirdparty/linux.git/blob - tools/testing/selftests/ftrace/ftracetest
Merge tag 'io_uring-5.7-2020-05-22' of git://git.kernel.dk/linux-block
[thirdparty/linux.git] / tools / testing / selftests / ftrace / ftracetest
1 #!/bin/sh
2 # SPDX-License-Identifier: GPL-2.0-only
3
4 # ftracetest - Ftrace test shell scripts
5 #
6 # Copyright (C) Hitachi Ltd., 2014
7 # Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
8 #
9
usage() { # errno [message]
  # Print an optional message, then the option summary, and exit with
  # the given status.  Quoting "$2" preserves the message verbatim.
  [ ! -z "$2" ] && echo "$2"
  echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
  echo " Options:"
  echo " -h|--help Show help message"
  echo " -k|--keep Keep passed test logs"
  echo " -v|--verbose Increase verbosity of test messages"
  echo " -vv Alias of -v -v (Show all results in stdout)"
  echo " -vvv Alias of -v -v -v (Show all commands immediately)"
  echo " --fail-unsupported Treat UNSUPPORTED as a failure"
  echo " --fail-unresolved Treat UNRESOLVED as a failure"
  echo " --stop-fail Stop testing on the first failure"
  echo " --console Show test outputs on console only (not with -v)"
  echo " -d|--debug Debug mode (trace all shell commands)"
  echo " -l|--logdir <dir> Save logs on the <dir>"
  echo " If <dir> is -, all logs output in console only"
  exit $1
}
26
27 # default error
28 err_ret=1
29
30 # kselftest skip code is 4
31 err_skip=4
32
33 # cgroup RT scheduling prevents chrt commands from succeeding, which
34 # induces failures in test wakeup tests. Disable for the duration of
35 # the tests.
36
37 readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us
38
39 sched_rt_runtime_orig=$(cat $sched_rt_runtime)
40
setup() {
  # Disable RT runtime limiting (-1 = unlimited) so chrt-based wakeup
  # tests can succeed; cleanup() restores the saved value.
  printf '%s\n' -1 > $sched_rt_runtime
}
44
cleanup() {
  # Restore the RT runtime limit captured at script start.
  printf '%s\n' "$sched_rt_runtime_orig" > $sched_rt_runtime
}
48
errexit() { # message
  # Report an error on stderr, restore system state, and exit with
  # err_ret (1 by default, err_skip when set by the caller beforehand).
  printf 'Error: %s\n' "$1" 1>&2
  cleanup
  exit $err_ret
}
54
# Ensuring user privilege: the script writes procfs/tracefs knobs and
# mounts filesystems, so bail out (via errexit -> cleanup) unless root.
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi

# Relax RT runtime limiting for the duration of the test run.
setup
61
62 # Utilities
absdir() { # file_path
  # Print the absolute directory containing $1.  Quoted expansions make
  # this safe for paths containing spaces; "&&" avoids printing the
  # caller's cwd if the cd fails.
  (cd "$(dirname -- "$1")" && pwd)
}
66
abspath() { # file_path
  # Print the absolute path of $1 (absolute dir + base name).  Quoted
  # so paths with spaces survive intact.
  echo "$(absdir "$1")/$(basename -- "$1")"
}
70
find_testcases() { # directory
  # Emit all *.tc files under $1, sorted, joined by single spaces on
  # one line.  "$1" is quoted so directories with spaces work; the
  # outer echo is deliberately unquoted to flatten find's newline list.
  echo $(find "$1" -name '*.tc' | sort)
}
74
parse_opts() { # opts
  # Parse command-line options into the global flag variables and
  # collect explicit testcases/directories into TEST_CASES.
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      # --console (VERBOSE=-1) is mutually exclusive with -v.
      # usage() takes "errno [message]": pass the status first, or the
      # message is silently swallowed and exit gets a non-numeric arg.
      if [ $VERBOSE -eq -1 ]; then
        usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      # -vv / -vvv are aliases for repeated -v; add the extra levels.
      [ "$1" = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ "$1" = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
        usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      # A single testcase file.
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
    ;;
    *)
      # A directory of testcases; anything else is an error.
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  # Explicit testcases replace the default "run everything" list.
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
147
# Parameters
# Locate the tracing directory: prefer an already-mounted tracefs, fall
# back to an existing debugfs mount, and mount one ourselves last.
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
  DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
  if [ -z "$DEBUGFS_DIR" ]; then
    # If tracefs exists, then so does /sys/kernel/tracing
    if [ -d "/sys/kernel/tracing" ]; then
      mount -t tracefs nodev /sys/kernel/tracing ||
        errexit "Failed to mount /sys/kernel/tracing"
      TRACING_DIR="/sys/kernel/tracing"
    # If debugfs exists, then so does /sys/kernel/debug
    elif [ -d "/sys/kernel/debug" ]; then
      mount -t debugfs nodev /sys/kernel/debug ||
        errexit "Failed to mount /sys/kernel/debug"
      TRACING_DIR="/sys/kernel/debug/tracing"
    else
      # Neither filesystem is configured: report "skip" to kselftest.
      err_ret=$err_skip
      errexit "debugfs and tracefs are not configured in this kernel"
    fi
  else
    TRACING_DIR="$DEBUGFS_DIR/tracing"
  fi
fi
if [ ! -d "$TRACING_DIR" ]; then
  # debugfs is mounted but has no tracing subdir: ftrace is disabled.
  err_ret=$err_skip
  errexit "ftrace is not configured in this kernel"
fi
175
# Defaults; parse_opts below may override any of these.
TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
# Per-run timestamped log directory; "-" means console-only (see below).
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
UNRESOLVED_RESULT=0
STOP_FAILURE=0
# Parse command-line options
parse_opts $*

# -d/--debug traces every shell command from here on.
[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
# LOG_DIR of "-" disables file logging entirely (empty LOG_FILE is the
# sentinel checked by prlog/catlog/run_test).
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
fi
205
# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout exists and number of colors is eight or more, use them
# NOTE: these are the literal characters backslash-0-3-3...; they are
# expanded later by prlog()'s printf format, not here.
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi
220
strip_esc() {
  # Filter: drop ANSI color/erase (CSI ... m|K) sequences from stdin so
  # log files stay plain text.
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E 's/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g'
}
225
prlog() { # [-n] messages
  # Print messages to stdout and, when file logging is enabled, append
  # a color-stripped copy to LOG_FILE.  The arguments are passed as the
  # printf *format* on purpose: color variables hold literal "\033[..."
  # strings that must be expanded here.  -n suppresses the newline.
  newline="\n"
  case "$1" in
  -n)
    newline=
    shift
    ;;
  esac
  printf "$*$newline"
  [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { # file
  # Show the given log file on stdout and, when file logging is
  # enabled, append a color-stripped copy to LOG_FILE.
  cat $1
  [ "$LOG_FILE" ] && strip_esc < $1 >> $LOG_FILE
}
239 prlog "=== Ftrace unit tests ==="
240
241
242 # Testcase management
243 # Test result codes - Dejagnu extended code
244 PASS=0 # The test succeeded.
245 FAIL=1 # The test failed, but was expected to succeed.
246 UNRESOLVED=2 # The test produced indeterminate results. (e.g. interrupted)
247 UNTESTED=3 # The test was not run, currently just a placeholder.
248 UNSUPPORTED=4 # The test failed because of lack of feature.
249 XFAIL=5 # The test failed, and was expected to fail.
250
251 # Accumulations
252 PASSED_CASES=
253 FAILED_CASES=
254 UNRESOLVED_CASES=
255 UNTESTED_CASES=
256 UNSUPPORTED_CASES=
257 XFAILED_CASES=
258 UNDEFINED_CASES=
259 TOTAL_RESULT=0
260
261 INSTANCE=
262 CASENO=0
263
testcase() { # testfile
  # Announce the next testcase: bump the counter and print the file's
  # "# description:" line without a newline (eval_result() later
  # completes the line with the result tag).
  CASENO=$((CASENO+1))
  desc=$(grep "^#[ \t]*description:" $1 | cut -f2 -d:)
  prlog -n "[$CASENO]$INSTANCE$desc"
}
269
# Succeeds iff the testcase declares "# flags: ... instance", meaning
# it should also be re-run inside a created tracing instance.
test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}
273
# Map a result code (from the SIG_RESULT trap machinery) to a colored
# log tag and record the case number in the matching *_CASES list.
# The return status drives TOTAL_RESULT: 0 keeps the run green;
# UNRESOLVED/UNSUPPORTED return 0 or 1 per the --fail-* options.
eval_result() { # sigval
  case $1 in
    $PASS)
      prlog " [${color_green}PASS${color_reset}]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog " [${color_red}FAIL${color_reset}]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog " [${color_blue}UNRESOLVED${color_reset}]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog " [${color_blue}UNTESTED${color_reset}]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog " [${color_blue}UNSUPPORTED${color_reset}]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog " [${color_green}XFAIL${color_reset}]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      # A code outside the Dejagnu set: the testcase itself is broken.
      prlog " [${color_blue}UNDEFINED${color_reset}]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}
313
# Signal handling for result codes
# Testcases run in a subshell and report non-PASS results by sending
# this (main) shell a realtime signal numbered SIG_BASE + <code>; each
# trap records the code into SIG_RESULT, which run_test() reads back.
SIG_RESULT=
SIG_BASE=36 # Use realtime signals
SIG_PID=$$

# PASS needs no signal: SIG_RESULT stays 0 when the subshell exits 0.
exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
# FAIL sends no signal here either: __run_test() raises SIG_FAIL itself
# whenever the testcase subshell exits non-zero.
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
356
# Source one testcase in a subshell rooted at TRACING_DIR with tracing
# freshly initialized; set -e aborts the testcase on the first failed
# command and set -x echoes every command into the captured log.
__run_test() { # testfile
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  # Non-zero status means the testcase died without signaling a result
  # code; report it to the main shell as a FAIL.
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
362
# Run one test case
# Drives a single testcase: announce it, capture its output per the
# verbosity level, translate the signaled result, and clean up.
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    # Console-only mode: "log" straight to our own stdout.
    local testlog=/proc/self/fd/1
  fi
  # Private scratch dir for the testcase, removed below.
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  # Output routing by verbosity:
  #  -1 (--console): raw output, no capture
  #  no log file:    everything to stdout
  #  >=3:            log and echo via tee
  #  ==2:            stderr to log, stdout to log+console via tee
  #  default:        everything to the log only
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  # SIG_RESULT was set (or left 0 = PASS) by the realtime-signal traps.
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}
397
# load in the helper functions
. $TEST_DIR/functions

# Main loop
for t in $TEST_CASES; do
  run_test $t
  # --stop-fail: abort the whole run on the first unexpected failure.
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done

# Test on instance loop
# Re-run every testcase flagged "instance" inside a freshly created
# tracing instance directory, restoring TRACING_DIR after each one.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
(cd $TRACING_DIR; initialize_ftrace) # for cleanup

# Summary: count the case numbers accumulated per result category.
prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# Restore the RT runtime setting saved at startup.
cleanup

# if no error, return 0
exit $TOTAL_RESULT