]> git.ipfire.org Git - thirdparty/bacula.git/commitdiff
cloud: exercise StorageClass directive for Amazon cloud driver
authornorbert.bizet <norbert.bizet@baculasystems.com>
Thu, 13 Oct 2022 10:05:44 +0000 (06:05 -0400)
committerEric Bollengier <eric@baculasystems.com>
Thu, 14 Sep 2023 11:56:59 +0000 (13:56 +0200)
regress/tests/cloud-storageclass-test [new file with mode: 0755]

diff --git a/regress/tests/cloud-storageclass-test b/regress/tests/cloud-storageclass-test
new file mode 100755 (executable)
index 0000000..91645c9
--- /dev/null
@@ -0,0 +1,92 @@
+#!/bin/sh
+#
+# Copyright (C) 2000-2021 Kern Sibbald
+# Copyright (C) 2021-2022 Bacula Systems SA
+# License: BSD 2-Clause; see file LICENSE-FOSS
+#
+# Cloud storageclass test. Exercise the StorageClass directive.
+# Only available for AWSDriver (Amazon)
+#
+TestName="cloud-storageclass-test"
+JobName=NightlySave
+# Pull in the regress helpers ($bperl, run_bacula, start_test, end_test, ...)
+. scripts/functions
+
+# Abort early unless the regress environment provides a cloud configuration
+require_cloud
+
+#config is required for cloud cleanup
+scripts/copy-test-confs
+scripts/cleanup
+
+# Backup fileset: default to the build tree unless overridden by the caller
+FORCE_FILE_SET=${FORCE_FILE_SET:-"${cwd}/build"}
+echo "$FORCE_FILE_SET" >${cwd}/tmp/file-list
+
+start_test
+
+#requires Amazon 
+cloud_driver=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'Driver')" | awk '{print tolower($0)}') 
+if [ "x$cloud_driver" != "xamazon" ]; then
+    echo "requires Amazon cloud driver. $cloud_driver found. Test skipped."
+    exit 0
+fi
+
+# choose infrequent access for quick test
+$bperl -e 'add_attribute("$conf/bacula-sd.conf", "StorageClass", "S3StandardIA", "Cloud")'
+# extract the bucket name
+bucket_name=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'BucketName')" | awk '{print tolower($0)}') 
+
+
+##### Label
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log0.out
+setdebug tags=cloud level=50 trace=1 storage
+label storage=File volume=Vol1
+END_OF_DATA
+
+# do label
+run_bacula
+
+###### Backup
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+setdebug tags=cloud level=50 trace=1 storage
+run job=$JobName level=Full yes
+wait
+list volumes
+llist volume=Vol1
+messages
+truncate cache volume=Vol1 storage=File
+END_OF_DATA
+
+run_bconsole
+
+# Simply use the CLI to retrieve the storage class of Vol1/part.2 in bucket $bucket_name
+region=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'Region')") 
+export AWS_DEFAULT_REGION=$region
+access_key=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'AccessKey')") 
+export AWS_ACCESS_KEY_ID=$access_key
+secret_key=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'SecretKey')") 
+export AWS_SECRET_ACCESS_KEY=$secret_key
+
+headobject=$(aws s3api head-object --bucket $bucket_name --key Vol1/part.2)
+
+echo $headobject
+
+echo "$headobject" | grep -q '"StorageClass"'
+if test $? -ne 0; then
+    echo "Error: No StorageClass found"
+    estat=1
+fi
+
+echo "$headobject" | grep -q '"StorageClass": "STANDARD_IA"'
+if test $? -ne 0; then
+    echo "Error: StorageClass is not STANDARD_IA"
+    estat=2
+fi
+
+check_for_zombie_jobs storage=File 
+stop_bacula
+end_test