Diffstat (limited to 'docs/utils.py')
-rw-r--r-- | docs/utils.py | 119
1 file changed, 110 insertions, 9 deletions
diff --git a/docs/utils.py b/docs/utils.py
index 9116c130..bb8074ba 100644
--- a/docs/utils.py
+++ b/docs/utils.py
@@ -1,15 +1,31 @@
 """Utilities used in generating docs."""
 import ast
-import importlib
+import importlib.util
 import inspect
+import os
+import subprocess
 import typing
 from pathlib import Path
 
-PROJECT_ROOT = Path(__file__).parent.parent
+import docutils.nodes
+import docutils.parsers.rst.states
+import git
+import releases
+import sphinx.util.logging
 
+logger = sphinx.util.logging.getLogger(__name__)
 
-def linkcode_resolve(source_url: str, domain: str, info: dict[str, str]) -> typing.Optional[str]:
+
+
+def get_build_root() -> Path:
+    """Get the project root folder for the current build."""
+    root = Path.cwd()
+    if root.name == "docs":
+        root = root.parent
+    return root
+
+
+def linkcode_resolve(repo_link: str, domain: str, info: dict[str, str]) -> typing.Optional[str]:
     """
     Function called by linkcode to get the URL for a given resource.
 
@@ -21,7 +37,25 @@ def linkcode_resolve(source_url: str, domain: str, info: dict[str, str]) -> typi
 
     symbol_name = info["fullname"]
 
-    module = importlib.import_module(info["module"])
+    build_root = get_build_root()
+
+    # Import the package to find files
+    origin = build_root / info["module"].replace(".", "/")
+    search_locations = []
+
+    if origin.is_dir():
+        search_locations.append(origin.absolute().as_posix())
+        origin = origin / "__init__.py"
+    else:
+        origin = Path(origin.absolute().as_posix() + ".py")
+        if not origin.exists():
+            raise Exception(f"Could not find `{info['module']}` as a package or file.")
+
+    # We can't use a normal import (importlib.import_module), because the module can conflict with another copy
+    # in multiversion builds. We load the module from the file location instead
+    spec = importlib.util.spec_from_file_location(info["module"], origin, submodule_search_locations=search_locations)
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)
 
     symbol = [module]
     for name in symbol_name.split("."):
@@ -58,9 +92,15 @@ def linkcode_resolve(source_url: str, domain: str, info: dict[str, str]) -> typi
         start += offset
         end += offset
 
-    file = Path(inspect.getfile(module)).relative_to(PROJECT_ROOT).as_posix()
+    file = Path(inspect.getfile(module)).relative_to(build_root).as_posix()
+
+    try:
+        sha = git.Repo(build_root).commit().hexsha
+    except git.InvalidGitRepositoryError:
+        # We are building a historical version, no git data available
+        sha = build_root.name
 
-    url = f"{source_url}/{file}#L{start}"
+    url = f"{repo_link}/blob/{sha}/{file}#L{start}"
 
     if end != start:
         url += f"-L{end}"
@@ -71,7 +111,7 @@ def cleanup() -> None:
     """Remove unneeded autogenerated doc files, and clean up others."""
     included = __get_included()
 
-    for file in (PROJECT_ROOT / "docs" / "output").iterdir():
+    for file in (get_build_root() / "docs" / "output").iterdir():
         if file.name in ("botcore.rst", "botcore.exts.rst", "botcore.utils.rst") and file.name in included:
             content = file.read_text(encoding="utf-8").splitlines(keepends=True)
 
@@ -92,7 +132,6 @@ def cleanup() -> None:
 
         else:
            # These are files that have not been explicitly included in the docs via __all__
-            print("Deleted file", file.name)
            file.unlink()
            continue
 
@@ -101,6 +140,24 @@ def cleanup() -> None:
         file.write_text(content, encoding="utf-8")
 
 
+def build_api_doc() -> None:
+    """Generate auto-module directives using apidoc."""
+    cmd = os.getenv("APIDOC_COMMAND") or "sphinx-apidoc -o docs/output botcore -feM"
+    cmd = cmd.split()
+
+    build_root = get_build_root()
+    output_folder = build_root / cmd[cmd.index("-o") + 1]
+
+    if output_folder.exists():
+        logger.info(f"Skipping api-doc for {output_folder.as_posix()} as it already exists.")
+        return
+
+    result = subprocess.run(cmd, cwd=build_root, stdout=subprocess.PIPE, check=True, env=os.environ)
+    logger.debug("api-doc Output:\n" + result.stdout.decode(encoding="utf-8") + "\n")
+
+    cleanup()
+
+
 def __get_included() -> set[str]:
     """Get a list of files that should be included in the final build."""
 
@@ -108,7 +165,7 @@
     try:
         module = importlib.import_module(module_name)
     except ModuleNotFoundError:
-        return {}
+        return set()
 
     _modules = {module.__name__ + ".rst"}
     if hasattr(module, "__all__"):
@@ -118,3 +175,47 @@
             return _modules
 
     return get_all_from_module("botcore")
+
+
+def reorder_release_entries(release_list: list[releases.Release]) -> None:
+    """
+    Sort `releases` based on `release.type`.
+
+    This is meant to be used as an override for `releases.reorder_release_entries` to support
+    custom types.
+    """
+    order = {"breaking": 0, "feature": 1, "bug": 2, "support": 3}
+    for release in release_list:
+        release["entries"].sort(key=lambda entry: order[entry.type])
+
+
+def emphasized_url(
+    name: str, rawtext: str, text: str, lineno: int, inliner: docutils.parsers.rst.states.Inliner, *__
+) -> tuple[list, list]:
+    """
+    Sphinx role to add hyperlinked literals.
+
+    ReST: :literal-url:`Google <https://google.com>`
+    Markdown equivalent: [`Google`](https://google.com)
+
+    Refer to https://docutils.sourceforge.io/docs/howto/rst-roles.html for details on the input and output.
+    """
+    arguments = text.rsplit(maxsplit=1)
+    if len(arguments) != 2:
+        message = inliner.reporter.error(
+            f"`{name}` expects a message and a URL, formatted as: :{name}:`message <url>`",
+            line=lineno
+        )
+        problem = inliner.problematic(text, rawtext, message)
+        return [problem], [message]
+
+    message, url = arguments
+    url: str = url[1:-1]  # Remove the angled brackets off the start and end
+
+    literal = docutils.nodes.literal(rawtext, message)
+    return [docutils.nodes.reference(rawtext, "", literal, refuri=url)], []
+
+
+def get_recursive_file_uris(folder: Path, match_pattern: str) -> list[str]:
+    """Get the URI of any file relative to folder which matches the `match_pattern` regex."""
+    return [file.relative_to(folder).as_posix() for file in folder.rglob(match_pattern)]
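The comment added inside linkcode_resolve explains why the patch drops importlib.import_module: in multi-version builds the same package can exist in several checkouts, and a normal import would return whichever copy sys.modules already holds. The snippet below is a minimal, standalone sketch of that importlib.util loading pattern. The helper name load_module_from_path and the project_root/module_name arguments are illustrative assumptions, not names from the patch itself.

"""Sketch: load a module from an explicit file path instead of sys.path."""
import importlib.util
from pathlib import Path


def load_module_from_path(module_name: str, project_root: Path):
    """Load `module_name` from files under `project_root`, bypassing sys.path."""
    origin = project_root / module_name.replace(".", "/")
    search_locations = []

    if origin.is_dir():
        # Package: point the loader at its __init__.py and record the package
        # directory so submodules resolve relative to this specific copy.
        search_locations.append(origin.absolute().as_posix())
        origin = origin / "__init__.py"
    else:
        # Plain module: fall back to a sibling .py file.
        origin = Path(origin.absolute().as_posix() + ".py")
        if not origin.exists():
            raise FileNotFoundError(f"Could not find `{module_name}` under {project_root}")

    spec = importlib.util.spec_from_file_location(
        module_name, origin, submodule_search_locations=search_locations
    )
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module

Because module_from_spec does not register anything in sys.modules, two checked-out versions of the same package can be loaded and inspected side by side during a multi-version documentation build, which is the conflict the patch comment describes.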
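For context, a docs/conf.py might wire these helpers together roughly as follows. This is a hedged sketch only: REPO_URL is a placeholder, the sys.path insertion and the monkeypatch of releases.reorder_release_entries are assumptions about how the override mentioned in the docstring could be applied, and only documented Sphinx/docutils hooks (sphinx.ext.linkcode's linkcode_resolve config value and app.add_role) are used.

# Hypothetical docs/conf.py wiring; not taken verbatim from the repository.
import functools
import os
import sys

import releases

sys.path.insert(0, os.path.abspath("."))  # make docs/utils.py importable
import utils  # the module shown in this diff

extensions = [
    "sphinx.ext.linkcode",  # calls linkcode_resolve(domain, info) per documented object
    "releases",             # changelog extension whose entry order is overridden below
]

REPO_URL = "https://github.com/example/example-repo"  # placeholder assumption

# sphinx.ext.linkcode expects a two-argument callable, so bind the repository
# link to match utils.linkcode_resolve(repo_link, domain, info).
linkcode_resolve = functools.partial(utils.linkcode_resolve, REPO_URL)

# One possible way to apply the override described in reorder_release_entries:
# sort changelog entries as breaking > feature > bug > support.
releases.reorder_release_entries = utils.reorder_release_entries

# Generate (and clean up) the apidoc output before Sphinx reads the sources.
utils.build_api_doc()


def setup(app):
    """Register the :literal-url: role used for hyperlinked literals."""
    app.add_role("literal-url", utils.emphasized_url)
    return {"parallel_read_safe": True}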