id: filter
with:
filters: |
- docs:
- - README.md
- - docs/**
- - docs_src/**
- - requirements-docs.txt
+ docs:
+ - README.md
+ - docs/**
+ - docs_src/**
+ - requirements-docs.txt
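+
+ # Collect the available language codes and expose them as a JSON output for the build-docs matrix.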
+ langs:
+ needs:
+ - changes
+ runs-on: ubuntu-latest
+ outputs:
+ langs: ${{ steps.show-langs.outputs.langs }}
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.11"
+ - uses: actions/cache@v3
+ id: cache
+ with:
+ path: ${{ env.pythonLocation }}
+ key: ${{ runner.os }}-python-docs-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml', 'requirements-docs.txt') }}-v03
+ - name: Install docs extras
+ if: steps.cache.outputs.cache-hit != 'true'
+ run: pip install -r requirements-docs.txt
+ - name: Export Language Codes
+ id: show-langs
+ run: |
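+ # Store the JSON list of language codes in the step output "langs"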
+ echo "langs=$(python ./scripts/docs.py langs-json)" >> $GITHUB_OUTPUT
+
build-docs:
- needs: changes
+ needs:
+ - changes
+ - langs
if: ${{ needs.changes.outputs.docs == 'true' }}
runs-on: ubuntu-latest
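+ # Fan out one build job per language, using the JSON list produced by the langs job.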
+ strategy:
+ matrix:
+ lang: ${{ fromJson(needs.langs.outputs.langs) }}
steps:
- name: Dump GitHub context
env:
- name: Install Material for MkDocs Insiders
if: ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false ) && steps.cache.outputs.cache-hit != 'true'
run: pip install git+https://${{ secrets.ACTIONS_TOKEN }}@github.com/squidfunk/mkdocs-material-insiders.git
+ - name: Update Languages
+ run: python ./scripts/docs.py update-languages
- name: Build Docs
- run: python ./scripts/docs.py build-all
- - name: Zip docs
- run: bash ./scripts/zip-docs.sh
+ run: python ./scripts/docs.py build-lang ${{ matrix.lang }}
- uses: actions/upload-artifact@v3
with:
- name: docs-zip
- path: ./site/docs.zip
- - name: Deploy to Netlify
- uses: nwtgck/actions-netlify@v2.0.0
+ name: docs-site
+ path: ./site/**
+
+ # https://github.com/marketplace/actions/alls-green#why
+ docs-all-green: # This job does nothing and is only used for the branch protection
+ if: always()
+ needs:
+ - build-docs
+ runs-on: ubuntu-latest
+ steps:
+ - name: Decide whether the needed jobs succeeded or failed
+ uses: re-actors/alls-green@release/v1
with:
- publish-dir: './site'
- production-branch: master
- github-token: ${{ secrets.FASTAPI_BUILD_DOCS_NETLIFY }}
- enable-commit-comment: false
- env:
- NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
- NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
+ jobs: ${{ toJSON(needs) }}
+ allowed-skips: build-docs
+import json
import os
import re
import shutil
build_lang_path = build_dir_path / lang
en_lang_path = Path("docs/en")
site_path = Path("site").absolute()
+ build_site_path = Path("site_build").absolute()
+ build_site_dist_path = build_site_path / lang
if lang == "en":
dist_path = site_path
else:
dist_path: Path = site_path / lang
shutil.rmtree(build_lang_path, ignore_errors=True)
shutil.copytree(lang_path, build_lang_path)
- shutil.copytree(en_docs_path / "data", build_lang_path / "data")
- overrides_src = en_docs_path / "overrides"
- overrides_dest = build_lang_path / "overrides"
- for path in overrides_src.iterdir():
- dest_path = overrides_dest / path.name
- if not dest_path.exists():
- shutil.copy(path, dest_path)
- en_config_path: Path = en_lang_path / mkdocs_name
- en_config: dict = mkdocs.utils.yaml_load(en_config_path.read_text(encoding="utf-8"))
- nav = en_config["nav"]
- lang_config_path: Path = lang_path / mkdocs_name
- lang_config: dict = mkdocs.utils.yaml_load(
- lang_config_path.read_text(encoding="utf-8")
- )
- lang_nav = lang_config["nav"]
- # Exclude first 2 entries FastAPI and Languages, for custom handling
- use_nav = nav[2:]
- lang_use_nav = lang_nav[2:]
- file_to_nav = get_file_to_nav_map(use_nav)
- sections = get_sections(use_nav)
- lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
- use_lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
- for file in file_to_nav:
- file_path = Path(file)
- lang_file_path: Path = build_lang_path / "docs" / file_path
- en_file_path: Path = en_lang_path / "docs" / file_path
- lang_file_path.parent.mkdir(parents=True, exist_ok=True)
- if not lang_file_path.is_file():
- en_text = en_file_path.read_text(encoding="utf-8")
- lang_text = get_text_with_translate_missing(en_text)
- lang_file_path.write_text(lang_text, encoding="utf-8")
- file_key = file_to_nav[file]
- use_lang_file_to_nav[file] = file_key
- if file_key:
- composite_key = ()
- new_key = ()
- for key_part in file_key:
- composite_key += (key_part,)
- key_first_file = sections[composite_key]
- if key_first_file in lang_file_to_nav:
- new_key = lang_file_to_nav[key_first_file]
- else:
- new_key += (key_part,)
- use_lang_file_to_nav[file] = new_key
- key_to_section = {(): []}
- for file, orig_file_key in file_to_nav.items():
- if file in use_lang_file_to_nav:
- file_key = use_lang_file_to_nav[file]
- else:
- file_key = orig_file_key
- section = get_key_section(key_to_section=key_to_section, key=file_key)
- section.append(file)
- new_nav = key_to_section[()]
- export_lang_nav = [lang_nav[0], nav[1]] + new_nav
- lang_config["nav"] = export_lang_nav
- build_lang_config_path: Path = build_lang_path / mkdocs_name
- build_lang_config_path.write_text(
- yaml.dump(lang_config, sort_keys=False, width=200, allow_unicode=True),
- encoding="utf-8",
- )
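+ # For languages other than English: copy shared data and overrides from the English docs,
+ # generate "missing translation" stubs for untranslated pages, and rebuild the language nav.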
+ if lang != "en":
+ shutil.copytree(en_docs_path / "data", build_lang_path / "data")
+ overrides_src = en_docs_path / "overrides"
+ overrides_dest = build_lang_path / "overrides"
+ for path in overrides_src.iterdir():
+ dest_path = overrides_dest / path.name
+ if not dest_path.exists():
+ shutil.copy(path, dest_path)
+ en_config_path: Path = en_lang_path / mkdocs_name
+ en_config: dict = mkdocs.utils.yaml_load(
+ en_config_path.read_text(encoding="utf-8")
+ )
+ nav = en_config["nav"]
+ lang_config_path: Path = lang_path / mkdocs_name
+ lang_config: dict = mkdocs.utils.yaml_load(
+ lang_config_path.read_text(encoding="utf-8")
+ )
+ lang_nav = lang_config["nav"]
+ # Exclude first 2 entries FastAPI and Languages, for custom handling
+ use_nav = nav[2:]
+ lang_use_nav = lang_nav[2:]
+ file_to_nav = get_file_to_nav_map(use_nav)
+ sections = get_sections(use_nav)
+ lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
+ use_lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
+ for file in file_to_nav:
+ file_path = Path(file)
+ lang_file_path: Path = build_lang_path / "docs" / file_path
+ en_file_path: Path = en_lang_path / "docs" / file_path
+ lang_file_path.parent.mkdir(parents=True, exist_ok=True)
+ if not lang_file_path.is_file():
+ en_text = en_file_path.read_text(encoding="utf-8")
+ lang_text = get_text_with_translate_missing(en_text)
+ lang_file_path.write_text(lang_text, encoding="utf-8")
+ file_key = file_to_nav[file]
+ use_lang_file_to_nav[file] = file_key
+ if file_key:
+ composite_key = ()
+ new_key = ()
+ for key_part in file_key:
+ composite_key += (key_part,)
+ key_first_file = sections[composite_key]
+ if key_first_file in lang_file_to_nav:
+ new_key = lang_file_to_nav[key_first_file]
+ else:
+ new_key += (key_part,)
+ use_lang_file_to_nav[file] = new_key
+ key_to_section = {(): []}
+ for file, orig_file_key in file_to_nav.items():
+ if file in use_lang_file_to_nav:
+ file_key = use_lang_file_to_nav[file]
+ else:
+ file_key = orig_file_key
+ section = get_key_section(key_to_section=key_to_section, key=file_key)
+ section.append(file)
+ new_nav = key_to_section[()]
+ export_lang_nav = [lang_nav[0], nav[1]] + new_nav
+ lang_config["nav"] = export_lang_nav
+ build_lang_config_path: Path = build_lang_path / mkdocs_name
+ build_lang_config_path.write_text(
+ yaml.dump(lang_config, sort_keys=False, width=200, allow_unicode=True),
+ encoding="utf-8",
+ )
current_dir = os.getcwd()
os.chdir(build_lang_path)
- subprocess.run(["mkdocs", "build", "--site-dir", dist_path], check=True)
+ shutil.rmtree(build_site_dist_path, ignore_errors=True)
+ shutil.rmtree(dist_path, ignore_errors=True)
+ subprocess.run(["mkdocs", "build", "--site-dir", build_site_dist_path], check=True)
+ shutil.copytree(build_site_dist_path, dist_path, dirs_exist_ok=True)
os.chdir(current_dir)
typer.secho(f"Successfully built docs for: {lang}", color=typer.colors.GREEN)
Build the mkdocs site for en, and then build each language inside it; the end result
is located in the ./site/ directory, with each language inside.
"""
- site_path = Path("site").absolute()
update_languages(lang=None)
- current_dir = os.getcwd()
- os.chdir(en_docs_path)
- typer.echo("Building docs for: en")
- subprocess.run(["mkdocs", "build", "--site-dir", site_path], check=True)
- os.chdir(current_dir)
- langs = []
- for lang in get_lang_paths():
- if lang == en_docs_path or not lang.is_dir():
- continue
- langs.append(lang.name)
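+ # Collect every language directory, including en; all of them are built through the process pool below.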
+ langs = [lang.name for lang in get_lang_paths() if lang.is_dir()]
cpu_count = os.cpu_count() or 1
process_pool_size = cpu_count * 4
typer.echo(f"Using process pool size: {process_pool_size}")
)
+@app.command()
+def langs_json():
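+ # Print the language codes as a JSON array so CI can feed them into the build matrix.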
+ langs = []
+ for lang_path in get_lang_paths():
+ if lang_path.is_dir():
+ langs.append(lang_path.name)
+ print(json.dumps(langs))
+
+
def get_key_section(
*, key_to_section: Dict[Tuple[str, ...], list], key: Tuple[str, ...]
) -> list: