git.ipfire.org Git - thirdparty/fastapi/fastapi.git/commitdiff
👷 Refactor Docs CI, run in multiple workers with a dynamic matrix to optimize speed...
author Sebastián Ramírez <tiangolo@gmail.com>
Sat, 24 Jun 2023 00:00:12 +0000 (02:00 +0200)
committer GitHub <noreply@github.com>
Sat, 24 Jun 2023 00:00:12 +0000 (02:00 +0200)
.github/workflows/build-docs.yml
.github/workflows/deploy-docs.yml [moved from .github/workflows/preview-docs.yml with 79% similarity]
.gitignore
scripts/docs.py
scripts/zip-docs.sh [deleted file]

index fb1fa6f098a0e94506a02046b8caf1cc88400422..c2880ef711418ec0f08a4247e4a5085d2c11b43c 100644 (file)
@@ -23,15 +23,45 @@ jobs:
       id: filter
       with:
         filters: |
-            docs:
-              - README.md
-              - docs/**
-              - docs_src/**
-              - requirements-docs.txt
+          docs:
+            - README.md
+            - docs/**
+            - docs_src/**
+            - requirements-docs.txt
+  langs:
+    needs:
+      - changes
+    runs-on: ubuntu-latest
+    outputs:
+      langs: ${{ steps.show-langs.outputs.langs }}
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.11"
+      - uses: actions/cache@v3
+        id: cache
+        with:
+          path: ${{ env.pythonLocation }}
+          key: ${{ runner.os }}-python-docs-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml', 'requirements-docs.txt') }}-v03
+      - name: Install docs extras
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: pip install -r requirements-docs.txt
+      - name: Export Language Codes
+        id: show-langs
+        run: |
+          echo "langs=$(python ./scripts/docs.py langs-json)" >> $GITHUB_OUTPUT
+
   build-docs:
-    needs: changes
+    needs:
+      - changes
+      - langs
     if: ${{ needs.changes.outputs.docs == 'true' }}
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        lang: ${{ fromJson(needs.langs.outputs.langs) }}
     steps:
       - name: Dump GitHub context
         env:
@@ -53,21 +83,24 @@ jobs:
       - name: Install Material for MkDocs Insiders
         if: ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false ) && steps.cache.outputs.cache-hit != 'true'
         run: pip install git+https://${{ secrets.ACTIONS_TOKEN }}@github.com/squidfunk/mkdocs-material-insiders.git
+      - name: Update Languages
+        run: python ./scripts/docs.py update-languages
       - name: Build Docs
-        run: python ./scripts/docs.py build-all
-      - name: Zip docs
-        run: bash ./scripts/zip-docs.sh
+        run: python ./scripts/docs.py build-lang ${{ matrix.lang }}
       - uses: actions/upload-artifact@v3
         with:
-          name: docs-zip
-          path: ./site/docs.zip
-      - name: Deploy to Netlify
-        uses: nwtgck/actions-netlify@v2.0.0
+          name: docs-site
+          path: ./site/**
+
+  # https://github.com/marketplace/actions/alls-green#why
+  docs-all-green:  # This job does nothing and is only used for the branch protection
+    if: always()
+    needs:
+      - build-docs
+    runs-on: ubuntu-latest
+    steps:
+      - name: Decide whether the needed jobs succeeded or failed
+        uses: re-actors/alls-green@release/v1
         with:
-          publish-dir: './site'
-          production-branch: master
-          github-token: ${{ secrets.FASTAPI_BUILD_DOCS_NETLIFY }}
-          enable-commit-comment: false
-        env:
-          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
-          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
+          jobs: ${{ toJSON(needs) }}
+          allowed-skips: build-docs
similarity index 79%
rename from .github/workflows/preview-docs.yml
rename to .github/workflows/deploy-docs.yml
index da98f5d2bdd4ddbb92d49e5d0b37feb2017ca2f1..312d835af87246bf084f8b42f70e75ba9d1af53a 100644 (file)
@@ -1,4 +1,4 @@
-name: Preview Docs
+name: Deploy Docs
 on:
   workflow_run:
     workflows:
@@ -7,9 +7,13 @@ on:
       - completed
 
 jobs:
-  preview-docs:
+  deploy-docs:
     runs-on: ubuntu-latest
     steps:
+      - name: Dump GitHub context
+        env:
+          GITHUB_CONTEXT: ${{ toJson(github) }}
+        run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v3
       - name: Clean site
         run: |
@@ -23,21 +27,15 @@ jobs:
           github_token: ${{ secrets.FASTAPI_PREVIEW_DOCS_DOWNLOAD_ARTIFACTS }}
           workflow: build-docs.yml
           run_id: ${{ github.event.workflow_run.id }}
-          name: docs-zip
+          name: docs-site
           path: ./site/
-      - name: Unzip docs
-        if: steps.download.outputs.found_artifact == 'true'
-        run: |
-          cd ./site
-          unzip docs.zip
-          rm -f docs.zip
       - name: Deploy to Netlify
         if: steps.download.outputs.found_artifact == 'true'
         id: netlify
         uses: nwtgck/actions-netlify@v2.0.0
         with:
           publish-dir: './site'
-          production-deploy: false
+          production-deploy: ${{ github.event.workflow_run.head_repository.full_name == github.repository && github.event.workflow_run.head_branch == 'master' }}
           github-token: ${{ secrets.FASTAPI_PREVIEW_DOCS_NETLIFY }}
           enable-commit-comment: false
         env:
index a26bb5cd648e719a54d60b1eae8f138e0b9074be..3cb64c0476f0f53b7db4ab350bfdf0decb073319 100644 (file)
@@ -16,6 +16,7 @@ Pipfile.lock
 env3.*
 env
 docs_build
+site_build
 venv
 docs.zip
 archive.zip
index e0953b8edb551dfcaed24ccf61b19033d6ba231c..c464f8dbea7a2fb1ab9f074dc3dddc69ab905dd2 100644 (file)
@@ -1,3 +1,4 @@
+import json
 import os
 import re
 import shutil
@@ -133,75 +134,83 @@ def build_lang(
     build_lang_path = build_dir_path / lang
     en_lang_path = Path("docs/en")
     site_path = Path("site").absolute()
+    build_site_path = Path("site_build").absolute()
+    build_site_dist_path = build_site_path / lang
     if lang == "en":
         dist_path = site_path
     else:
         dist_path: Path = site_path / lang
     shutil.rmtree(build_lang_path, ignore_errors=True)
     shutil.copytree(lang_path, build_lang_path)
-    shutil.copytree(en_docs_path / "data", build_lang_path / "data")
-    overrides_src = en_docs_path / "overrides"
-    overrides_dest = build_lang_path / "overrides"
-    for path in overrides_src.iterdir():
-        dest_path = overrides_dest / path.name
-        if not dest_path.exists():
-            shutil.copy(path, dest_path)
-    en_config_path: Path = en_lang_path / mkdocs_name
-    en_config: dict = mkdocs.utils.yaml_load(en_config_path.read_text(encoding="utf-8"))
-    nav = en_config["nav"]
-    lang_config_path: Path = lang_path / mkdocs_name
-    lang_config: dict = mkdocs.utils.yaml_load(
-        lang_config_path.read_text(encoding="utf-8")
-    )
-    lang_nav = lang_config["nav"]
-    # Exclude first 2 entries FastAPI and Languages, for custom handling
-    use_nav = nav[2:]
-    lang_use_nav = lang_nav[2:]
-    file_to_nav = get_file_to_nav_map(use_nav)
-    sections = get_sections(use_nav)
-    lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
-    use_lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
-    for file in file_to_nav:
-        file_path = Path(file)
-        lang_file_path: Path = build_lang_path / "docs" / file_path
-        en_file_path: Path = en_lang_path / "docs" / file_path
-        lang_file_path.parent.mkdir(parents=True, exist_ok=True)
-        if not lang_file_path.is_file():
-            en_text = en_file_path.read_text(encoding="utf-8")
-            lang_text = get_text_with_translate_missing(en_text)
-            lang_file_path.write_text(lang_text, encoding="utf-8")
-            file_key = file_to_nav[file]
-            use_lang_file_to_nav[file] = file_key
-            if file_key:
-                composite_key = ()
-                new_key = ()
-                for key_part in file_key:
-                    composite_key += (key_part,)
-                    key_first_file = sections[composite_key]
-                    if key_first_file in lang_file_to_nav:
-                        new_key = lang_file_to_nav[key_first_file]
-                    else:
-                        new_key += (key_part,)
-                use_lang_file_to_nav[file] = new_key
-    key_to_section = {(): []}
-    for file, orig_file_key in file_to_nav.items():
-        if file in use_lang_file_to_nav:
-            file_key = use_lang_file_to_nav[file]
-        else:
-            file_key = orig_file_key
-        section = get_key_section(key_to_section=key_to_section, key=file_key)
-        section.append(file)
-    new_nav = key_to_section[()]
-    export_lang_nav = [lang_nav[0], nav[1]] + new_nav
-    lang_config["nav"] = export_lang_nav
-    build_lang_config_path: Path = build_lang_path / mkdocs_name
-    build_lang_config_path.write_text(
-        yaml.dump(lang_config, sort_keys=False, width=200, allow_unicode=True),
-        encoding="utf-8",
-    )
+    if not lang == "en":
+        shutil.copytree(en_docs_path / "data", build_lang_path / "data")
+        overrides_src = en_docs_path / "overrides"
+        overrides_dest = build_lang_path / "overrides"
+        for path in overrides_src.iterdir():
+            dest_path = overrides_dest / path.name
+            if not dest_path.exists():
+                shutil.copy(path, dest_path)
+        en_config_path: Path = en_lang_path / mkdocs_name
+        en_config: dict = mkdocs.utils.yaml_load(
+            en_config_path.read_text(encoding="utf-8")
+        )
+        nav = en_config["nav"]
+        lang_config_path: Path = lang_path / mkdocs_name
+        lang_config: dict = mkdocs.utils.yaml_load(
+            lang_config_path.read_text(encoding="utf-8")
+        )
+        lang_nav = lang_config["nav"]
+        # Exclude first 2 entries FastAPI and Languages, for custom handling
+        use_nav = nav[2:]
+        lang_use_nav = lang_nav[2:]
+        file_to_nav = get_file_to_nav_map(use_nav)
+        sections = get_sections(use_nav)
+        lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
+        use_lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
+        for file in file_to_nav:
+            file_path = Path(file)
+            lang_file_path: Path = build_lang_path / "docs" / file_path
+            en_file_path: Path = en_lang_path / "docs" / file_path
+            lang_file_path.parent.mkdir(parents=True, exist_ok=True)
+            if not lang_file_path.is_file():
+                en_text = en_file_path.read_text(encoding="utf-8")
+                lang_text = get_text_with_translate_missing(en_text)
+                lang_file_path.write_text(lang_text, encoding="utf-8")
+                file_key = file_to_nav[file]
+                use_lang_file_to_nav[file] = file_key
+                if file_key:
+                    composite_key = ()
+                    new_key = ()
+                    for key_part in file_key:
+                        composite_key += (key_part,)
+                        key_first_file = sections[composite_key]
+                        if key_first_file in lang_file_to_nav:
+                            new_key = lang_file_to_nav[key_first_file]
+                        else:
+                            new_key += (key_part,)
+                    use_lang_file_to_nav[file] = new_key
+        key_to_section = {(): []}
+        for file, orig_file_key in file_to_nav.items():
+            if file in use_lang_file_to_nav:
+                file_key = use_lang_file_to_nav[file]
+            else:
+                file_key = orig_file_key
+            section = get_key_section(key_to_section=key_to_section, key=file_key)
+            section.append(file)
+        new_nav = key_to_section[()]
+        export_lang_nav = [lang_nav[0], nav[1]] + new_nav
+        lang_config["nav"] = export_lang_nav
+        build_lang_config_path: Path = build_lang_path / mkdocs_name
+        build_lang_config_path.write_text(
+            yaml.dump(lang_config, sort_keys=False, width=200, allow_unicode=True),
+            encoding="utf-8",
+        )
     current_dir = os.getcwd()
     os.chdir(build_lang_path)
-    subprocess.run(["mkdocs", "build", "--site-dir", dist_path], check=True)
+    shutil.rmtree(build_site_dist_path, ignore_errors=True)
+    shutil.rmtree(dist_path, ignore_errors=True)
+    subprocess.run(["mkdocs", "build", "--site-dir", build_site_dist_path], check=True)
+    shutil.copytree(build_site_dist_path, dist_path, dirs_exist_ok=True)
     os.chdir(current_dir)
     typer.secho(f"Successfully built docs for: {lang}", color=typer.colors.GREEN)
 
@@ -271,18 +280,8 @@ def build_all():
     Build mkdocs site for en, and then build each language inside, end result is located
     at directory ./site/ with each language inside.
     """
-    site_path = Path("site").absolute()
     update_languages(lang=None)
-    current_dir = os.getcwd()
-    os.chdir(en_docs_path)
-    typer.echo("Building docs for: en")
-    subprocess.run(["mkdocs", "build", "--site-dir", site_path], check=True)
-    os.chdir(current_dir)
-    langs = []
-    for lang in get_lang_paths():
-        if lang == en_docs_path or not lang.is_dir():
-            continue
-        langs.append(lang.name)
+    langs = [lang.name for lang in get_lang_paths() if lang.is_dir()]
     cpu_count = os.cpu_count() or 1
     process_pool_size = cpu_count * 4
     typer.echo(f"Using process pool size: {process_pool_size}")
@@ -397,6 +396,15 @@ def update_config(lang: str):
     )
 
 
+@app.command()
+def langs_json():
+    langs = []
+    for lang_path in get_lang_paths():
+        if lang_path.is_dir():
+            langs.append(lang_path.name)
+    print(json.dumps(langs))
+
+
 def get_key_section(
     *, key_to_section: Dict[Tuple[str, ...], list], key: Tuple[str, ...]
 ) -> list:
diff --git a/scripts/zip-docs.sh b/scripts/zip-docs.sh
deleted file mode 100644 (file)
index 47c3b09..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-set -x
-set -e
-
-cd ./site
-
-if [ -f docs.zip ]; then
-    rm -rf docs.zip
-fi
-zip -r docs.zip ./