Use rclone instead of custom sync script to publish documentation to AWS S3 bucket
author     milzi234 <fla@synapps.de>
           Fri, 14 Feb 2025 09:07:03 +0000 (10:07 +0100)
committer  milzi234 <fla@synapps.de>
           Fri, 14 Feb 2025 09:08:35 +0000 (10:08 +0100)
.github/scripts/publish.js [deleted file]
.github/workflows/documentation.yml

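The custom Node.js publisher is replaced by plain rclone syncs against the docs S3 bucket, followed by CloudFront invalidations via the AWS CLI. A minimal sketch of the pattern each publish step in the updated workflow follows, with placeholder bucket and distribution values standing in for the repository variables:

    # sync a built docs tree into its site prefix on the docs bucket
    # (docs-s3 is the rclone remote the workflow writes to ~/.config/rclone/rclone.conf)
    rclone sync --progress auth-html-docs/ docs-s3:<AWS_S3_BUCKET_DOCS>/docs.powerdns.com/authoritative/
    # then invalidate the CloudFront distribution fronting that site so the new pages are served
    aws cloudfront create-invalidation --distribution-id <AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS> --paths "/*"

Unlike the old per-file PutObject loop, rclone sync also removes objects under the target prefix that no longer exist in the source tree.
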
diff --git a/.github/scripts/publish.js b/.github/scripts/publish.js
deleted file mode 100644 (file)
index 3012475..0000000
--- a/.github/scripts/publish.js
+++ /dev/null
@@ -1,96 +0,0 @@
-const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
-const { CloudFrontClient, CreateInvalidationCommand } = require("@aws-sdk/client-cloudfront");
-const fs = require('fs');
-const path = require('path');
-
-// Configure AWS SDK
-const s3Client = new S3Client({ region: process.env.AWS_REGION });
-const cloudFrontClient = new CloudFrontClient({ region: process.env.AWS_REGION });
-
-async function uploadToS3(bucketName, sourceDir, destDir = '') {
-  const files = fs.readdirSync(sourceDir);
-
-  for (const file of files) {
-    const filePath = path.join(sourceDir, file);
-    const key = path.join(destDir, file);
-
-    if (fs.statSync(filePath).isDirectory()) {
-      await uploadToS3(bucketName, filePath, key);
-    } else {
-      const fileContent = fs.readFileSync(filePath);
-      const command = new PutObjectCommand({
-        Bucket: bucketName,
-        Key: key,
-        Body: fileContent,
-        ContentType: getContentType(file),
-      });
-      await s3Client.send(command);
-    }
-  }
-}
-
-function getContentType(filename) {
-  const ext = path.extname(filename).toLowerCase();
-  switch (ext) {
-    case '.html': return 'text/html';
-    case '.css': return 'text/css';
-    case '.js': return 'application/javascript';
-    case '.json': return 'application/json';
-    case '.png': return 'image/png';
-    case '.jpg': case '.jpeg': return 'image/jpeg';
-    default: return 'application/octet-stream';
-  }
-}
-
-async function invalidateCloudFront(distributionId, paths) {
-  const command = new CreateInvalidationCommand({
-    DistributionId: distributionId,
-    InvalidationBatch: {
-      CallerReference: Date.now().toString(),
-      Paths: {
-        Quantity: paths.length,
-        Items: paths,
-      },
-    },
-  });
-  await cloudFrontClient.send(command);
-}
-
-async function publishToSite(site, sourceDir, targetDir = '') {
-  const bucketName = process.env.AWS_S3_BUCKET_DOCS;
-  let distributionId, siteDir;
-
-  if (site === 'dnsdist.org') {
-    distributionId = process.env.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST;
-    siteDir = 'dnsdist.org';
-  } else if (site === 'docs.powerdns.com') {
-    distributionId = process.env.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS;
-    siteDir = 'docs.powerdns.com';
-  } else {
-    throw new Error('Invalid site specified');
-  }
-
-  const fullTargetDir = path.join(siteDir, targetDir);
-  await uploadToS3(bucketName, sourceDir, fullTargetDir);
-
-  // Invalidate CloudFront cache
-  await invalidateCloudFront(distributionId, ['/*']);
-
-  console.log(`Published from ${sourceDir} to ${site}${targetDir ? '/' + targetDir : ''}`);
-}
-
-async function main() {
-  const args = process.argv.slice(2);
-  if (args[0] === 'publish') {
-    if (args.length < 3 || args.length > 4) {
-      console.log('Usage: node publish.js publish <SITE> <SOURCE_DIR> [TARGET_DIR]');
-      return;
-    }
-    const [, site, sourceDir, targetDir] = args;
-    await publishToSite(site, sourceDir, targetDir);
-  } else {
-    console.log('Usage: node publish.js publish <SITE> <SOURCE_DIR> [TARGET_DIR]');
-  }
-}
-
-main().catch(console.error);
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 163d9d5a117afef2b7412dd3763de64f621c5d5d..e430c7787eefc6b6e6331d460c428ee3db384f15 100644 (file)
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -265,20 +265,34 @@ jobs:
             exit 1
           fi
       - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      # Configure AWS Credentials
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v4
         with:
-          node-version: '20'
-      - name: Install AWS SDK v3
-        run: npm install @aws-sdk/client-s3 @aws-sdk/client-cloudfront
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: ${{ vars.AWS_REGION }}
+      # Install rclone
+      - name: Install rclone
+        run: |
+          curl https://rclone.org/install.sh | sudo bash
+      - name: Configure rclone
+        run: |
+          mkdir -p ~/.config/rclone
+          cat > ~/.config/rclone/rclone.conf << EOF
+          [docs-s3]
+          type = s3
+          provider = AWS
+          env_auth = true
+          region = ${{ vars.AWS_REGION }}
+          location_constraint = ${{ vars.AWS_REGION }}
+          EOF
       - name: Download artifacts
         uses: actions/download-artifact@v4
         with:
           path: artifacts
       - name: Publish to AWS
         env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          AWS_REGION: ${{ vars.AWS_REGION }}
           AWS_S3_BUCKET_DOCS: ${{ vars.AWS_S3_BUCKET_DOCS }}
           AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}
           AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}
@@ -286,15 +300,31 @@ jobs:
           if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ]; then
             tar -xf artifacts/dnsdist-html-docs-${{needs.build-docs.outputs.pdns_version}}/dnsdist-html-docs.tar
             cp artifacts/dnsdist-${{needs.build-docs.outputs.pdns_version}}.pdf/dnsdist.pdf dnsdist-html-docs/
-            node .github/scripts/publish.js publish dnsdist.org dnsdist-html-docs /
+            rclone sync --progress dnsdist-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/dnsdist.org/
+            # Invalidate CloudFront cache for DNSdist
+            aws cloudfront create-invalidation \
+              --distribution-id $AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST \
+              --paths "/*"
           fi
           if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ]; then
             tar -xf artifacts/recursor-html-docs-${{needs.build-docs.outputs.pdns_version}}/rec-html-docs.tar
             cp artifacts/PowerDNS-Recursor-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Recursor.pdf rec-html-docs/
-            node .github/scripts/publish.js publish docs.powerdns.com rec-html-docs /recursor
+            rclone sync --progress rec-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/recursor/
+
             tar -xf artifacts/authoritative-html-docs-${{needs.build-docs.outputs.pdns_version}}/auth-html-docs.tar
             cp artifacts/PowerDNS-Authoritative-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Authoritative.pdf auth-html-docs/
-            node .github/scripts/publish.js publish docs.powerdns.com auth-html-docs /authoritative
+            rclone sync --progress auth-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/authoritative/
+
             tar -xf artifacts/website-${{needs.build-docs.outputs.pdns_version}}/website.tar
-            node .github/scripts/publish.js publish docs.powerdns.com website/docs.powerdns.com /
+            rclone sync --progress website/docs.powerdns.com/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/
+
+            # Invalidate CloudFront cache for docs.powerdns.com
+            aws cloudfront create-invalidation \
+              --distribution-id $AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS \
+              --paths "/*"
           fi
+      # Cleanup credentials
+      - name: Cleanup rclone config
+        if: always()
+        run: |
+          rm -f ~/.config/rclone/rclone.conf