From: norbert.bizet
Date: Thu, 13 Oct 2022 10:05:44 +0000 (-0400)
Subject: cloud: exercise StorageClass directive for Amazon cloud driver
X-Git-Tag: Beta-15.0.0~487
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=254285964d94a46e6a08e05f52e3728a4fab6151;p=thirdparty%2Fbacula.git

cloud: exercise StorageClass directive for Amazon cloud driver
---

diff --git a/regress/tests/cloud-storageclass-test b/regress/tests/cloud-storageclass-test
new file mode 100755
index 000000000..91645c956
--- /dev/null
+++ b/regress/tests/cloud-storageclass-test
@@ -0,0 +1,92 @@
+#!/bin/sh
+#
+# Copyright (C) 2000-2021 Kern Sibbald
+# Copyright (C) 2021-2022 Bacula Systems SA
+# License: BSD 2-Clause; see file LICENSE-FOSS
+#
+# Cloud storageclass test. Exercise the StorageClass directive.
+# Only available for AWSDriver (Amazon)
+#
+TestName="cloud-storageclass-test"
+JobName=NightlySave
+. scripts/functions
+
+require_cloud
+
+# config is required for cloud cleanup
+scripts/copy-test-confs
+scripts/cleanup
+
+FORCE_FILE_SET=${FORCE_FILE_SET:-"${cwd}/build"}
+echo "$FORCE_FILE_SET" >${cwd}/tmp/file-list
+
+start_test
+
+# requires the Amazon driver
+cloud_driver=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'Driver')" | awk '{print tolower($0)}')
+if [ "x$cloud_driver" != "xamazon" ]; then
+    echo "requires Amazon cloud driver. $cloud_driver found. Test skipped."
+    exit 0
+fi
+
+# choose Infrequent Access for a quick test
+$bperl -e 'add_attribute("$conf/bacula-sd.conf", "StorageClass", "S3StandardIA", "Cloud")'
+# extract the bucket name
+bucket_name=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'BucketName')" | awk '{print tolower($0)}')
+
+
+##### Label
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log0.out
+setdebug tags=cloud level=50 trace=1 storage
+label storage=File volume=Vol1
+END_OF_DATA
+
+# do label
+run_bacula
+
+###### Backup
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+setdebug tags=cloud level=50 trace=1 storage
+run job=$JobName level=Full yes
+wait
+list volumes
+llist volume=Vol1
+messages
+truncate cache volume=Vol1 storage=File
+END_OF_DATA
+
+run_bconsole
+
+# Use the AWS CLI to retrieve the storage class of Vol1/part.2 in bucket $bucket_name
+region=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'Region')")
+export AWS_DEFAULT_REGION=$region
+access_key=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'AccessKey')")
+export AWS_ACCESS_KEY_ID=$access_key
+secret_key=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'SecretKey')")
+export AWS_SECRET_ACCESS_KEY=$secret_key
+
+headobject=$(aws s3api head-object --bucket $bucket_name --key Vol1/part.2)
+
+echo $headobject
+
+echo "$headobject" | grep -q '"StorageClass"'
+if test $? -ne 0; then
+    echo "Error: No StorageClass found"
+    estat=1
+fi
+
+echo "$headobject" | grep -q '"StorageClass": "STANDARD_IA"'
+if test $? -ne 0; then
+    echo "Error: StorageClass is not STANDARD_IA"
+    estat=2
+fi
+
+check_for_zombie_jobs storage=File
+stop_bacula
+end_test
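
For context, the StorageClass directive this test exercises belongs to the Cloud resource of bacula-sd.conf, the same resource the test reads with get_attribute and patches with add_attribute. The sketch below shows roughly what such a resource could look like after the test has injected "StorageClass = S3StandardIA"; the resource name, bucket, credentials and region are placeholders, not values taken from this commit, and only the directives the test actually touches (Driver, BucketName, AccessKey, SecretKey, Region, StorageClass) plus the mandatory Name are shown.

    # Hypothetical Cloud resource in bacula-sd.conf -- all values are placeholders.
    Cloud {
      Name = DummyCloud               # placeholder; matches $CLOUD_NAME in the regress config
      Driver = "Amazon"               # the test skips unless the driver is Amazon
      BucketName = "my-bucket"        # placeholder; read by the test for "aws s3api head-object"
      AccessKey = "xxxxxxxx"          # placeholder; exported by the test as AWS_ACCESS_KEY_ID
      SecretKey = "yyyyyyyy"          # placeholder; exported by the test as AWS_SECRET_ACCESS_KEY
      Region = "eu-west-3"            # placeholder; exported by the test as AWS_DEFAULT_REGION
      StorageClass = "S3StandardIA"   # value injected by the test
    }

With that value in place, the test labels Vol1, runs a Full backup, truncates the cloud cache for Vol1, and finally verifies with "aws s3api head-object --bucket <bucket> --key Vol1/part.2" that the uploaded part reports "StorageClass": "STANDARD_IA".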