From: norbert.bizet
Date: Wed, 27 Jul 2022 10:05:34 +0000 (-0400)
Subject: cloud: rename and specialize regress test to check aws StorageClass attribute
X-Git-Tag: Beta-15.0.0~538
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=22b6ff0e4c01572da7a31fba5cc8f4ee9bf21ced;p=thirdparty%2Fbacula.git

cloud: rename and specialize regress test to check aws StorageClass attribute
---

diff --git a/regress/tests/cloud-aws-storageclass-test b/regress/tests/cloud-aws-storageclass-test
new file mode 100755
index 000000000..10f38391c
--- /dev/null
+++ b/regress/tests/cloud-aws-storageclass-test
@@ -0,0 +1,99 @@
+#!/bin/bash
+#
+# Copyright (C) 2000-2021 Kern Sibbald
+# Copyright (C) 2021-2022 Bacula Systems SA
+# License: BSD 2-Clause; see file LICENSE-FOSS
+#
+# Cloud test. Label one volume per AWS StorageClass value and verify the part's storage class with the aws CLI.
+#
+TestName="cloud-aws-storageclass-test"
+JobName=NightlySave
+. scripts/functions
+
+require_cloud
+
+#config is required for cloud cleanup
+scripts/copy-test-confs
+scripts/cleanup
+
+which aws > /dev/null
+if test $? -ne 0; then
+echo "aws cli must be installed. Abort"
+exit 1
+fi
+
+start_test
+
+# force aws driver
+$bperl -e 'add_attribute("$conf/bacula-sd.conf", "Driver", "amazon", "Cloud")'
+$bperl -e 'add_attribute("$conf/bacula-sd.conf", "MaximumPartSize", "10000000", "Device")'
+
+BucketName=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'BucketName')")
+AccessKey=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'AccessKey')")
+SecretKey=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'SecretKey')")
+Region=$($bperl -e "get_attribute('$conf/bacula-sd.conf', 'Cloud', '$CLOUD_NAME', 'Region')")
+
+#export AWS_DEFAULT_REGION=$Region
+#export AWS_ACCESS_KEY_ID=$AccessKey
+#export AWS_SECRET_ACCESS_KEY=$SecretKey
+
+declare -A tiers
+tiers[0]=S3Standard
+tiers[1]=S3StandardIA
+tiers[2]=S3IntelligentTiering
+tiers[3]=S3OneZoneIA
+tiers[4]=S3GlacierInstantRetrieval
+tiers[5]=S3GlacierFlexibleRetrieval
+tiers[6]=S3GlacierDeepArchive
+tiers[7]=S3Rrs
+
+declare -A AwsS3apiStorageClass
+AwsS3apiStorageClass[0]=STANDARD
+AwsS3apiStorageClass[1]=STANDARD_IA
+AwsS3apiStorageClass[2]=INTELLIGENT_TIERING
+AwsS3apiStorageClass[3]=ONEZONE_IA
+AwsS3apiStorageClass[4]=GLACIER_IR
+AwsS3apiStorageClass[5]=GLACIER
+AwsS3apiStorageClass[6]=DEEP_ARCHIVE
+AwsS3apiStorageClass[7]=REDUCED_REDUNDANCY
+
+test_tier()
+{
+#$1 : tier_index
+#$2 : tier value
+
+echo " == checking $2 in Vol$1 =="
+
+$bperl -e 'add_attribute("$conf/bacula-sd.conf", "StorageClass", "'$2'", "Cloud")'
+
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log$1.out
+setdebug tags=cloud level=50 trace=1 storage
+label storage=File volume=Vol$1
+END_OF_DATA
+
+# do label
+run_bacula
+
+sleep 2
+check_for_zombie_jobs storage=File
+stop_bacula
+
+# retrieve the part tier with aws cli
+aws s3api get-object-attributes --bucket $BucketName --key Vol$1/part.1 --object-attributes StorageClass | grep ${AwsS3apiStorageClass[$1]}
+
+if test $? -ne 0; then
+  echo "Error: could not find correct storage class $2=${AwsS3apiStorageClass[$1]} in $BucketName/Vol$1/part.1"
+  estat=1
+else
+  echo "$BucketName/Vol$1/part.1 storage class is $2=${AwsS3apiStorageClass[$1]} : OK."
+fi
+}
+
+estat=0
+
+for key in "${!tiers[@]}"; do test_tier $key ${tiers[$key]}; done
+
+end_test
diff --git a/regress/tests/cloud-object-tier-test b/regress/tests/cloud-object-tier-test
deleted file mode 100755
index 744a3ad79..000000000
--- a/regress/tests/cloud-object-tier-test
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-#
-# Copyright (C) 2000-2021 Kern Sibbald
-# Copyright (C) 2021-2022 Bacula Systems SA
-# License: BSD 2-Clause; see file LICENSE-FOSS
-#
-# Cloud test. We run backups and restore with the "truncate cache" command in-between
-#
-TestName="cloud-object-tier-test"
-JobName=NightlySave
-. scripts/functions
-
-require_cloud
-
-#config is required for cloud cleanup
-scripts/copy-test-confs
-scripts/cleanup
-
-FORCE_FILE_SET=${FORCE_FILE_SET:-"${cwd}/build"}
-echo "$FORCE_FILE_SET" >${cwd}/tmp/file-list
-
-start_test
-
-$bperl -e 'add_attribute("$conf/bacula-sd.conf", "MaximumPartSize", "10000000", "Device")'
-$bperl -e 'add_attribute("$conf/bacula-sd.conf", "MaximumUploadBandwidth", "4MB/s", "Cloud")'
-$bperl -e 'add_attribute("$conf/bacula-sd.conf", "MaximumDownloadBandwidth", "4MB/s", "Cloud")'
-
-# {"S3Standard", S3_STANDARD},
-# {"S3StandardIA", S3_STANDARD_IA},
-# {"S3IntelligentTiering", S3_INTELLIGENT_TIERING},
-# {"S3OneZoneIA", S3_ONE_ZONE_IA},
-# {"S3GlacierInstantRetrieval", S3_GLACIER_INSTANT_RETRIEVAL},
-# {"S3GlacierFlexibleRetrieval", S3_GLACIER_FLEXIBLE_RETRIEVAL},
-# {"S3GlacierDeepArchive", S3_GLACIER_DEEP_ARCHIVE},
-# {"S3Rrs", S3_RRS},
-
-declare -A tiers
-tiers[0]=S3Standard
-tiers[1]=S3StandardIA
-tiers[2]=S3IntelligentTiering
-tiers[3]=S3OneZoneIA
-tiers[4]=S3GlacierInstantRetrieval
-tiers[5]=S3GlacierFlexibleRetrieval
-tiers[6]=S3GlacierDeepArchive
-tiers[7]=S3Rrs
-
-test_tier()
-{
-#$1 : tier_index
-#$2 : tier value
-
-echo " == checking tier[$1]: $2 =="
-
-$bperl -e 'add_attribute("$conf/bacula-sd.conf", "StorageClass", "'$2'", "Cloud")'
-
-cat <<END_OF_DATA >${cwd}/tmp/bconcmds
-@output /dev/null
-messages
-@$out ${cwd}/tmp/log$1.out
-setdebug tags=cloud level=50 trace=1 storage
-label storage=File volume=Vol$1
-END_OF_DATA
-
-# do label
-run_bacula
-
-#cat <<END_OF_DATA >${cwd}/tmp/bconcmds
-#run job=$JobName level=Full yes
-#wait
-#messages
-#END_OF_DATA
-
-#run_bconsole
-
-sleep 2
-check_for_zombie_jobs storage=File
-stop_bacula
-
-# must check if the versioned part has been cleaned up
-grep "objects_default_tier: $1" $working/*-sd.trace > /dev/null
-if test $? -ne 0; then
-  echo "Error: could not find correct tier $2"
-  estat=1
-else
-  echo "tier $2=$1 OK."
-fi
-}
-
-estat=0
-
-for key in "${!tiers[@]}"; do test_tier $key ${tiers[$key]}; done
-
-end_test
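
Note on the check performed by test_tier(): the same verification can be run by hand outside the regress harness when debugging a single storage class. The sketch below is an assumption-laden standalone version, not part of the patch; BUCKET, VOLUME and BACULA_CLASS are hand-filled placeholders, and it assumes an aws CLI recent enough to offer "s3api get-object-attributes". It mirrors the tiers/AwsS3apiStorageClass mapping of the new test but compares the returned value exactly instead of grepping for it.

#!/bin/bash
#
# Standalone sketch of the storage-class check, for manual debugging only.
# BUCKET, VOLUME and BACULA_CLASS below are placeholders, not values taken
# from the patch above.

# Map Bacula StorageClass directive values to the identifiers returned by
# the S3 API, mirroring the two arrays in cloud-aws-storageclass-test.
declare -A class_map=(
  [S3Standard]=STANDARD
  [S3StandardIA]=STANDARD_IA
  [S3IntelligentTiering]=INTELLIGENT_TIERING
  [S3OneZoneIA]=ONEZONE_IA
  [S3GlacierInstantRetrieval]=GLACIER_IR
  [S3GlacierFlexibleRetrieval]=GLACIER
  [S3GlacierDeepArchive]=DEEP_ARCHIVE
  [S3Rrs]=REDUCED_REDUNDANCY
)

BUCKET=my-regress-bucket     # placeholder: bucket named in the Cloud resource
VOLUME=Vol0                  # placeholder: volume labeled by the test
BACULA_CLASS=S3Standard      # placeholder: value of the StorageClass directive

expected=${class_map[$BACULA_CLASS]}

# Query the storage class of the first cloud part; --query/--output extract
# the bare value instead of the full JSON document.
actual=$(aws s3api get-object-attributes \
           --bucket "$BUCKET" \
           --key "$VOLUME/part.1" \
           --object-attributes StorageClass \
           --query StorageClass --output text 2>/dev/null)
if [ $? -ne 0 ]; then
  echo "Error: aws s3api get-object-attributes failed for $BUCKET/$VOLUME/part.1"
  exit 1
fi

# Some S3 responses omit StorageClass for standard-class objects, so treat a
# missing value ("None" in text output) as STANDARD.
if [ -z "$actual" ] || [ "$actual" = "None" ]; then
  actual=STANDARD
fi

if [ "$actual" = "$expected" ]; then
  echo "$BUCKET/$VOLUME/part.1 storage class is $actual: OK"
else
  echo "Error: expected $expected for $BACULA_CLASS, got $actual"
  exit 1
fi

Comparing the exact value rather than grepping keeps a broad name such as GLACIER from also matching a GLACIER_IR response.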