Add a regression test for exercising MT10291
author norbert.bizet <norbert.bizet@baculasystems.com>
Thu, 21 Dec 2023 15:30:50 +0000 (10:30 -0500)
committer Eric Bollengier <eric@baculasystems.com>
Tue, 13 Feb 2024 09:36:03 +0000 (10:36 +0100)
regress/tests/cloud-truncate-and-purge-MT10291 [new file with mode: 0755]

diff --git a/regress/tests/cloud-truncate-and-purge-MT10291 b/regress/tests/cloud-truncate-and-purge-MT10291
new file mode 100755 (executable)
index 0000000..0587f64
--- /dev/null
@@ -0,0 +1,133 @@
+#!/bin/sh
+#
+# Copyright (C) 2000-2021 Kern Sibbald
+# Copyright (C) 2021-2023 Bacula Systems SA
+# License: BSD 2-Clause; see file LICENSE-FOSS
+#
+# Cloud test. Run backups and a restore with the "truncate cache" command in
+# between, then purge the volume (regression test for MT10291)
+#
+TestName="cloud-truncate-and-purge-MT10291"
+JobName=NightlySave
+. scripts/functions
+
+require_cloud
+
+# config is required for cloud cleanup
+scripts/copy-test-confs
+scripts/cleanup
+
+FORCE_FILE_SET=${FORCE_FILE_SET:-"${cwd}/build"}
+echo "$FORCE_FILE_SET" >${cwd}/tmp/file-list
+
+start_test
+
+$bperl -e 'add_attribute("$conf/bacula-sd.conf", "MaximumPartSize", "10000000", "Device")'
+$bperl -e 'add_attribute("$conf/bacula-sd.conf", "MaximumUploadBandwidth", "4MB/s", "Cloud")'
+$bperl -e 'add_attribute("$conf/bacula-sd.conf", "MaximumDownloadBandwidth", "4MB/s", "Cloud")'
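+# MaximumPartSize splits the backup into multiple cloud parts; the bandwidth
+# caps presumably keep uploads/downloads slow enough for truncate/purge to be
+# exercised while parts are still being transferred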
+
+
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+setdebug level=50 tags=cloud trace=1 storage
+label storage=File volume=Vol1
+END_OF_DATA
+
+# do label
+run_bacula
+
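+# First full backup, then list the volume and truncate its cloud cache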
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+setdebug level=50 tags=cloud trace=1 storage
+run job=$JobName level=Full yes
+wait
+list volumes
+llist volume=Vol1
+messages
+truncate cache volume=Vol1 storage=File
+END_OF_DATA
+
+run_bconsole
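+# show what is left in the local cache after "truncate cache"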
+ls -l tmp/Vol*
+
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds-form
+@# 
+@# now do a restore
+@#
+@$out ${cwd}/tmp/log2.out
+@#setdebug level=200 client
+@#setdebug level=350 storage
+truncate cache volume=Vol1 storage=File
+@exec "ls -l ${cwd}/tmp/Vol1"
+restore where=${cwd}/tmp/bacula-restores storage=File jobid=1
+mark *
+done
+yes
+wait
+messages
+sql
+select * from JobMedia;
+
+@$out $tmp/log31.out
+cloud list storage=File
+@$out $tmp/log3.out
+cloud list volume=Vol1 storage=File
+
+purge volume=Vol1
+
+END_OF_DATA
+
+rm -rf ${cwd}/tmp/bacula-restores $tmp/log3*out
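+# Expand the bconsole command template; the @jobid@ placeholder is not used in
+# this form (jobid=1 is hardcoded in the restore command above)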
+echo "s%@jobid@%$i%" >${cwd}/tmp/in
+sed -f ${cwd}/tmp/in ${cwd}/tmp/bconcmds-form >${cwd}/tmp/bconcmds
+run_bconsole
+
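+# Second full backup and cache truncation, after the restore and purge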
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+setdebug level=50 tags=cloud trace=1 storage
+run job=$JobName level=Full yes
+wait
+list volumes
+llist volume=Vol1
+messages
+truncate cache volume=Vol1 storage=File
+END_OF_DATA
+
+run_bconsole
+ls -l tmp/Vol*
+
+
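+# log3.out holds the "cloud list volume=Vol1" output; with multiple parts it
+# should be at least 10 lines long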
+nb=`wc -l < $tmp/log3.out`
+if [ "$nb" -lt 10 ]; then
+    print_debug "ERROR: Not enough lines in $tmp/log3.out for the parts list"
+    estat=1
+fi
+
+grep Vol1 $tmp/log31.out > /dev/null
+if [ $? -ne 0 ]; then
+    print_debug "ERROR: Unable to find the volume list in $tmp/log31.out"
+    estat=1
+fi
+
+ls -l tmp/Vol* >>${cwd}/tmp/log5.out
+ls -l ${cwd}/tmp/Vol1
+
+
+sleep 2
+check_for_zombie_jobs storage=File 
+stop_bacula
+
+check_two_logs
+#check_restore_diff
+
+$rscripts/diff.pl -s "$FORCE_FILE_SET" -d "$cwd/tmp/bacula-restores/$FORCE_FILE_SET"
+if test $? -ne 0; then
+    dstat=1
+fi
+
+end_test