diff --git a/.github/workflows/docs_test_build.yml b/.github/workflows/docs_test_build.yml index a208f7bcd..b18013e29 100644 --- a/.github/workflows/docs_test_build.yml +++ b/.github/workflows/docs_test_build.yml @@ -37,7 +37,7 @@ jobs: # auto-generate the python API reference) pip install . # install doxygen - sudo apt-get install doxygen pandoc + sudo apt-get install doxygen pandoc graphviz - name: Build the docs run: | # the -W flag tells sphinx to treat any warnings as errors diff --git a/docs/doxygen/Doxyfile b/docs/doxygen/Doxyfile index 4fedbe262..2193ee715 100644 --- a/docs/doxygen/Doxyfile +++ b/docs/doxygen/Doxyfile @@ -323,7 +323,7 @@ OPTIMIZE_OUTPUT_SLICE = NO # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. -EXTENSION_MAPPING = +EXTENSION_MAPPING = cu=C++ # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable @@ -861,6 +861,7 @@ FILE_PATTERNS = *.c \ *.cxx \ *.cpp \ *.c++ \ + *.cu \ *.java \ *.ii \ *.ixx \ @@ -1737,7 +1738,7 @@ EXTRA_SEARCH_MAPPINGS = # If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output. # The default value is: YES. -GENERATE_LATEX = YES +GENERATE_LATEX = NO # The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of @@ -2039,7 +2040,7 @@ MAN_LINKS = NO # captures the structure of the code including all documentation. # The default value is: NO. -GENERATE_XML = YES # NO +GENERATE_XML = NO # The XML_OUTPUT tag is used to specify where the XML pages will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of @@ -2159,7 +2160,7 @@ ENABLE_PREPROCESSING = YES # The default value is: NO. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. 
-MACRO_EXPANSION = NO +MACRO_EXPANSION = YES # If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then # the macro expansion is limited to the macros specified with the PREDEFINED and @@ -2167,7 +2168,7 @@ MACRO_EXPANSION = NO # The default value is: NO. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. -EXPAND_ONLY_PREDEF = NO +EXPAND_ONLY_PREDEF = YES # If the SEARCH_INCLUDES tag is set to YES, the include files in the # INCLUDE_PATH will be searched if a #include is found. @@ -2199,7 +2200,7 @@ INCLUDE_FILE_PATTERNS = # recursively expanded use the := operator instead of the = operator. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. -PREDEFINED = +PREDEFINED = __inline__=inline # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this # tag can be used to specify a list of macro names that should be expanded. The diff --git a/docs/sphinx/Reference/InternalApiRef.md b/docs/sphinx/Reference/InternalApiRef.md deleted file mode 100644 index 0aaa30cbf..000000000 --- a/docs/sphinx/Reference/InternalApiRef.md +++ /dev/null @@ -1,14 +0,0 @@ -# Internal API - -:::{todo} -Re-enable Breathe Extension so we can render the internal API documentation generated through doxygen. - -This machinery is left over from an earlier attempt to use Sphinx and currently produces errors. -I suspect there are some issues with doxygen docstrings since we haven't been consistently running it. 
-::: - - diff --git a/docs/sphinx/Reference/index.md b/docs/sphinx/Reference/index.md index c94460b25..92c1f1581 100644 --- a/docs/sphinx/Reference/index.md +++ b/docs/sphinx/Reference/index.md @@ -5,7 +5,8 @@ Parameters.md MakefileConfiguration.md -InternalApiRef.md PythonApiRef.rst + +Internal API Reference <internal-api-ref/index> ::: diff --git a/docs/sphinx/Reference/internal-api-ref/index.rst b/docs/sphinx/Reference/internal-api-ref/index.rst new file mode 100644 index 000000000..c20af77ad --- /dev/null +++ b/docs/sphinx/Reference/internal-api-ref/index.rst @@ -0,0 +1,10 @@ +DUMMY_TITLE +=========== +I am a dummy file that was written by the doxybuild Sphinx Extension. + +My purpose is to get processed by Sphinx so that Sphinx generates a dummy +webpage and properly links to that page. +The doxybuild extension should then replace this page with the +doxygen-generated webpages. + +Something went wrong if you are reading this on a rendered webpage! diff --git a/docs/sphinx/_ext/doxybuild/__init__.py b/docs/sphinx/_ext/doxybuild/__init__.py new file mode 100644 index 000000000..9a4fca7d9 --- /dev/null +++ b/docs/sphinx/_ext/doxybuild/__init__.py @@ -0,0 +1,129 @@ +""" +Build doxygen documentation.
+""" + +from collections.abc import Mapping +from collections import ChainMap +import os +import shutil +import textwrap + +from sphinx.application import Sphinx +from sphinx.util.typing import ExtensionMetadata + +from .build_snap import build_consistent_with_cache, try_measure_snap +from .run_doxygen import DoxyBuildPaths, run_doxygen, write_custom_doxyfile_if_needed + + +def generate_doxygen( + build_paths: DoxyBuildPaths, + html_file_extension: str = ".html", + extra_overrides: Mapping[str, str] | None = None, +): + baseline = {"HTML_FILE_EXTENSION": html_file_extension} + + if extra_overrides is None: + extra_overrides = baseline + else: + extra_overrides = ChainMap(extra_overrides, baseline) + + # write the generated doxygen config file to override certain properties + # (overwrite any existing file if it doesn't match) + reuse_custom_doxyfile = not write_custom_doxyfile_if_needed( + build_paths, extra_overrides + ) + + cache_file = os.path.join(build_paths.build_cache_dir, "cached_mtimes.json") + if reuse_custom_doxyfile and build_consistent_with_cache(cache_file, build_paths): + pass # we can skip the build! 
+ else: + success = run_doxygen(build_paths) + # write a snapshot on success + if success: + try_measure_snap(build_paths, loudly_fail=True).write_json(cache_file) + + # copy the files into the builddir + # TODO: DO THIS AT THE VERY END OF A BUILD + + +def setup_stub_files(app: Sphinx) -> None: + """ + Write a stub file into the source-directory location where we will be + copying the doxygen-generated index file at the end of the build + + We need to do this so that Sphinx-Generated Table Of Contents link + to the doxygen page properly links against the doxygen webpages + """ + dest_dir = os.path.join(app.srcdir, app.config.doxybuild_dest_dir) + stub_file = os.path.join(dest_dir, "index.rst") + if not os.path.exists(stub_file): + if not os.path.isdir(dest_dir): + os.mkdir(dest_dir) + contents = textwrap.dedent("""\ + DUMMY_TITLE + =========== + I am a dummy file that was written by the doxybuild Sphinx Extension. + + My purpose is to get processed by Sphinx so that Sphinx generates a dummy + webpage and properly links to that page. + The doxybuild extension should then replace this page with the + doxygen-generated webpages. + + Something went wrong if you are reading this on a rendered webpage!
+ """) + with open(stub_file, "w") as f: + f.write(contents) + + +def copy_doxygen_html(app: Sphinx, exception: None) -> None: + """ + Copy the previously generated doxygen html into the output directory + of the sphinx build + """ + dest_dir: os.PathLike = os.path.join(app.outdir, app.config.doxybuild_dest_dir) + build_paths: DoxyBuildPaths = app.config.doxybuild_build_paths + assert os.path.isdir(dest_dir) + shutil.copytree(src=build_paths.dox_build_dir, dst=dest_dir, dirs_exist_ok=True) + + +_CONFIG_VALS = [ # fmt: (name, default, rebuild, types) + # these first 2 paths are specified relative to the config directory + ("doxybuild_hardcoded_doxyfile", None, "env", frozenset([str])), + ("doxybuild_src_code_dir", None, "env", frozenset([str])), + # specified relative to the config directory + ("doxybuild_dest_dir", None, "env", frozenset([str])), + # a dict holding override values + ("doxybuild_overrides", None, "env", frozenset([dict])), +] + + +def setup(app: Sphinx) -> ExtensionMetadata: + if os.getenv("SKIPDOXYGEN", "FALSE").lower() != "true": + for name, default, rebuild, types in _CONFIG_VALS: + app.add_config_value(name, default, rebuild, types=types) + + app.connect("builder-inited", setup_stub_files) + app.connect("build-finished", copy_doxygen_html) + + app.config.doxybuild_build_paths = DoxyBuildPaths.create( + # just like breathe, we're going to assume that the general build directory + # is the parent of the doctree directory, and we'll create a cache location + # location there for our own use + doxybuild_build_cache_dir=os.path.join( + os.path.dirname(os.path.abspath(app.doctreedir)), "doxybuild-cache" + ), + cpp_src_dir=os.path.join(app.confdir, app.config.doxybuild_src_code_dir), + hardcoded_doxyfile=os.path.join( + app.confdir, app.config.doxybuild_hardcoded_doxyfile + ), + ) + + # STEP 2: actually run doxygen and generate files (these will be copied later) + html_file_suffix = app.config.html_file_suffix + html_file_suffix = ".html" if 
html_file_suffix is None else html_file_suffix + generate_doxygen( + build_paths=app.config.doxybuild_build_paths, + html_file_extension=html_file_suffix, + extra_overrides=app.config.doxybuild_overrides, + ) + return {"version": "0.1", "parallel_read_safe": False, "parallel_write_safe": False} diff --git a/docs/sphinx/_ext/doxybuild/build_snap.py b/docs/sphinx/_ext/doxybuild/build_snap.py new file mode 100644 index 000000000..81b75df9d --- /dev/null +++ b/docs/sphinx/_ext/doxybuild/build_snap.py @@ -0,0 +1,186 @@ +""" +Logic for constructing a cache of modification times. +""" + +from collections.abc import Iterator +import dataclasses +import json +import os + +from .run_doxygen import DoxyBuildPaths + + +@dataclasses.dataclass +class DoxyBuildSnapshot: + """ + Represents a snapshot of a doxygen build + """ + + # lists paths to directories or individual files that are relevant to + # the build + build_paths: DoxyBuildPaths + + # holds the mtimes for each entry in build_paths + mtimes: dict[str, int] + + # list of files constructed within dox_build_dir (to be clear, they should + # only contain the basename of each file) + build_dir_artifacts: set[str] + + def __post_init__(self): + assert self.build_dir_artifacts is not None # sanity check! + + def write_json(self, path): + d = { + "build_paths": self.build_paths.to_serialization_dict(), + "mtimes": self.mtimes, + "build_dir_artifacts": list(self.build_dir_artifacts), + } + with open(path, "w") as f: + json.dump(d, f) + + @classmethod + def from_json(cls, path): + with open(path, "r") as f: + data = json.load(f) + return cls( + build_paths=DoxyBuildPaths(**data["build_paths"]), + mtimes=data["mtimes"], + build_dir_artifacts=set(data["build_dir_artifacts"]), + ) + + +def _it_tree_names(dir_path: os.PathLike) -> Iterator[str]: + """ + Recursively walk the names of file or directory in the specified path + + The name is the location relative to dir_path. 
To get the path from the + name, use `os.path.join(dir_path, name)` + """ + dir_path = os.path.normpath(str(dir_path)) + n_dir_path_chars = len(dir_path) + for root_path, dirs, files in os.walk(dir_path, followlinks=False): + # root_path holds the path to root + # root_name holds the path to root relative to dir_path + if root_path == dir_path: + root_name = "" + else: + assert root_path[n_dir_path_chars] == os.sep # sanity_check + root_name = root_path[n_dir_path_chars + 1 :] + + yield from (os.path.join(root_name, d) for d in dirs) + yield from (os.path.join(root_name, f) for f in files) + + +def _get_mtime(nominal_path: os.PathLike, return_nameset: bool = False): + """ + Walk a directory and determine the most recent time at which a contained + file/directory was modified, created, deleted, removed, etc. + + Parameters + ---------- + nominal_path + The nominal path to measure the mtime for + return_nameset + When True, returns set holding the locations of all queried + files/directory, relative to nominal_path, other than + nominal_path itself. + """ + + def _fn(path): # gives posix timestamp in seconds (rounded up) + return int(os.stat(path).st_mtime + 1) + + # explicitly measure mtime of nominal_path + max_mtime = _fn(nominal_path) + nameset = set() + + if os.path.isdir(nominal_path): + # make iterator over the mtimes of each item in dir_path. 
We explicitly + # check mtimes of directories since they provide the only indication that + # files within a that directory were deleted/moved + for name in _it_tree_names(nominal_path): + if return_nameset: + nameset.add(name) + max_mtime = max(_fn(os.path.join(nominal_path, name)), max_mtime) + if return_nameset: + return max_mtime, nameset + return max_mtime + + +def try_measure_snap( + build_paths: DoxyBuildPaths, loudly_fail: bool = False +) -> None | DoxyBuildSnapshot: + """ + try to measure the mtime (modification time) for each the specified + dependenies/artifacts of the doxygen process + + Parameters + ---------- + build_paths + Holds the paths for each of the dependencies and artifacts of the + doxygen build + loudly_fail + When True, this will raise an exception upon failure + + Returns + ------- + None or DoxyBuildPaths[int] + If one or more of the provided paths don't exist, then None is + returned. Otherwise, a DoxyBuildPaths, where each field holds + the appropriate modification time, is returned + """ + mtimes = {} + build_dir_artifacts = None + + for field in dataclasses.fields(build_paths): + name = field.name + path = getattr(build_paths, name) + + if not os.path.exists(path): + return None + elif name == "dox_build_dir": + mtimes[name], build_dir_artifacts = _get_mtime(path, True) + mtimes[name] = _get_mtime(path, False) + + return DoxyBuildSnapshot( + build_paths=build_paths, mtimes=mtimes, build_dir_artifacts=build_dir_artifacts + ) + + +def build_consistent_with_cache( + cache_file: os.PathLike, depend_artifact_paths: DoxyBuildPaths +) -> bool: + """ + Checks + Checks whether the modification times for each dependency or artifact of + the doxygen build matches the modification times from a previous build + + Parameters + ---------- + cache_file + Path to file where a DoxyBuildSnapshot would have been saved by a + previous snapshot + depend_artifact_paths + Holds the paths for each of the dependencies & artifacts of the + doxygen build + + 
Returns + ------- + bool + Indicates whether there was a match + """ + + # load the cached_mtimes + try: + cached_snap = DoxyBuildSnapshot.from_json(cache_file) + except (FileNotFoundError, KeyError): + return False # if we can't find the cached mtimes, we need to call doxygen + + # measure the modification times + actual_snap = try_measure_snap(depend_artifact_paths) + + if actual_snap is None: + # one or more dependencies/artifacts is missing. + return False + + return actual_snap == cached_snap diff --git a/docs/sphinx/_ext/doxybuild/run_doxygen.py b/docs/sphinx/_ext/doxybuild/run_doxygen.py new file mode 100644 index 000000000..ad34c4883 --- /dev/null +++ b/docs/sphinx/_ext/doxybuild/run_doxygen.py @@ -0,0 +1,151 @@ +""" +Does the heavy-lifting of actually executing doxygen +""" + +from collections.abc import Mapping +import dataclasses +import filecmp +import os +import shutil +import string +import subprocess +import sys +import tempfile +import typing + + +@dataclasses.dataclass(kw_only=True, frozen=True) +class DoxyBuildPaths: + """ + Each field corresponds to a file or directory of files that are either + dependencies of doxygen or artifacts produced by doxygen. 
+ """ + + # The c++ source directory that doxygen docs are build from + cpp_src_dir: os.PathLike + # the standard doxygen file (that is committed to the repository) + hardcoded_doxygen_file: os.PathLike + # path to the doxygen file generated by this machinery + generated_doxyfile: os.PathLike + # path to the doxygen directory where the build-products are placed + dox_build_dir: os.PathLike + + @classmethod + def create( + cls, + doxybuild_build_cache_dir: os.PathLike, + cpp_src_dir: os.PathLike, + hardcoded_doxyfile: os.PathLike, + ): + return cls( + cpp_src_dir=cpp_src_dir, + hardcoded_doxygen_file=hardcoded_doxyfile, + generated_doxyfile=os.path.join(doxybuild_build_cache_dir, "Doxyfile"), + dox_build_dir=os.path.join(doxybuild_build_cache_dir, "html"), + ) + + @property + def build_cache_dir(self): + return os.path.dirname(self.dox_build_dir) + + def __eq__(self, other): + for field in dataclasses.fields(self): + attr = field.name + left = os.path.abspath(str(getattr(self, attr))) + right = os.path.abspath(str(getattr(other, attr))) + if left != right: + return False + return True + + def to_serialization_dict(self): + # creates a dict (to be used for dumping JSON) + return { + field.name: str(getattr(self, field.name)) + for field in dataclasses.fields(self) + } + + +_TEMPLATE = """ +# this is a template for a Doxyfile that can be used by Sphinx +# -> the basic premise is to leave the original Doxygen file with sensible +# defaults (so that we can build the docs without sphinx) +# -> this file will pull in those settings and selectively overwrite them + +@INCLUDE = {RAW_CONF_FILE} + +OUTPUT_DIRECTORY = {OUTPUT_DIRECTORY} +INPUT = {INPUT} +""" +# PROJECT_NUMBER = {DOXYGEN_VERSION_STRING} +# DOT_PATH = {DOXYGEN_DOT_PATH} + + +def _write_template(f: typing.IO, template: str, mapping: Mapping[str, str]): + # get the field names in the string + field_name_set = set( + quad[1] for quad in string.Formatter().parse(template) if quad[1] is not None + ) + if 
len(field_name_set.symmetric_difference(mapping)) != 0: + for name in field_name_set: + if name not in mapping: + raise ValueError(f"{name!r} was not provided by the mapping") + for name in mapping: + if name not in field_name_set: + raise ValueError( + f"{name!r} was provided by the mapping, but not known to template" + ) + else: + f.write(template.format_map(mapping)) + + +def write_custom_doxyfile_if_needed( + build_paths: DoxyBuildPaths, extra_overrides: dict[str, str] +) -> bool: + # returns whether a new doxyfile was written (or if one was reused from + # a prior build) + + template_mapping = { + "RAW_CONF_FILE": os.path.abspath(build_paths.hardcoded_doxygen_file), + "INPUT": os.path.abspath(build_paths.cpp_src_dir), + "OUTPUT_DIRECTORY": os.path.dirname(build_paths.dox_build_dir), + } + + with tempfile.TemporaryDirectory() as tmpdirname: + path = os.path.join(tmpdirname, "Doxyfile") + with open(path, "w") as f: + _write_template(f, _TEMPLATE, template_mapping) + for key, value in extra_overrides.items(): + assert key not in template_mapping # sanity check! 
+ f.write(f"{key} = {value}\n") + if os.path.isfile(build_paths.generated_doxyfile): + if filecmp.cmp(path, build_paths.generated_doxyfile, shallow=False): + return False + os.remove(build_paths.generated_doxyfile) + out_dirname = os.path.dirname(build_paths.generated_doxyfile) + if not os.path.isdir(out_dirname): + os.makedirs(out_dirname) + shutil.move(path, build_paths.generated_doxyfile) + return True + + +def run_doxygen(build_paths: DoxyBuildPaths) -> bool: + """ + Execute doxygen and return whether it succeeded + """ + + if os.path.exists(build_paths.dox_build_dir): + shutil.rmtree(build_paths.dox_build_dir) + + success = False + try: + retcode = subprocess.call(["doxygen", build_paths.generated_doxyfile]) + if retcode == 0: + success = True + else: + sys.stderr.write("doxygen terminated by signal %s" % (-retcode)) + except OSError as e: + sys.stderr.write(f"doxygen execution failed: {e}") + + if (not success) and os.path.exists(build_paths.dox_build_dir): + shutil.rmtree(build_paths.dox_build_dir) + return success diff --git a/docs/sphinx/conf.py b/docs/sphinx/conf.py index f0c7e8e34..81a3bfbc1 100644 --- a/docs/sphinx/conf.py +++ b/docs/sphinx/conf.py @@ -10,10 +10,7 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -import functools -import json import os -import subprocess import sys # If extensions (or modules to document with autodoc) are in another directory, @@ -36,7 +33,6 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ - 'breathe', 'myst_parser', 'nbsphinx', 'sphinx.ext.autodoc', @@ -54,6 +50,7 @@ # Custom Extensions # ----------------- + "doxybuild", "par", "cli_help", ] @@ -102,6 +99,17 @@ "fieldlist" ] +# -- Options for doxybuild extension ----------------------------------------- + +# path to the baseline doxyfile (relative to this config file) +doxybuild_hardcoded_doxyfile = "../doxygen/Doxyfile" +# path to the C++ source code directory (relative to this config file) +doxybuild_src_code_dir = "../../src" +# path relative to the source directory where the stub files are written +doxybuild_dest_dir = "Reference/internal-api-ref" +# override doxygen parameters https://www.doxygen.nl/manual/config.html +doxybuild_overrides = {"PROJECT_NUMBER": release} + # -- Options for par extension ----------------------------------------------- par_separator = "." @@ -125,77 +133,3 @@ # -- Doxygen/Breathe Stuff --------------------------------------------------- -def _it_tree_paths(dir_path, include_dirs=True): - # recursive iterate over paths to all files (and possibly directories) - for root, dirs, files in os.walk(dir_path, followlinks=False): - if include_dirs: - yield from (os.path.join(root, d) for d in dirs) - yield from (os.path.join(root, f) for f in files) - -def _dirtree_mtime(dir_path): - """ - Walk a directory and determine the most recent time at which a contained - file/directory was modified, created, deleted, removed, etc. - """ - - def get_mtime(path): # gives posix timestamp in seconds (rounded up) - return os.stat(dir_path).st_mtime + 1 - - # explicitly measure mtime of root dir - root_mtime = get_mtime(dir_path) - - # make iterator over the mtimes of each item in dir_path. 
We explicitly - # check mtimes of directories since they provide the only indication that - # files within a that directory were deleted/moved - itr = (get_mtime(p) for p in _it_tree_paths(dir_path, include_dirs=True)) - - return functools.reduce(max, itr, root_mtime) - -def build_doxygen(): - if os.getenv("SKIPDOXYGEN", "FALSE").lower() == "true": - return None # skip a rebuild - - # load cached modification times (if they exist) - try: - with open("../cached_mtimes.json", "r") as f: - cached_mtimes = json.load(f) - except FileNotFoundError: - cached_mtimes = {"src" : None, "dox" : None} - - dox_builddir = "../doxygen/build/xml/" - - # determine modification time of the source code directory tree - src_mtime = _dirtree_mtime("../../src") - - # if the doxygen build-dir already exists, and the modification times - # (of the source code directory and the doxygen build-dir) match the - # cached values, then we don't need to regenerate the documentation - if ( - os.path.isdir(dox_builddir) and - (cached_mtimes["src"] == src_mtime) and - (cached_mtimes["dox"] == _dirtree_mtime(dox_builddir)) - ): - return None - - try: - retcode = subprocess.call("doxygen", cwd="../doxygen") - - if retcode < 0: - sys.stderr.write("doxygen terminated by signal %s" % (-retcode)) - else: - # get modification time of dox_builddir (after the build) - dox_mtime = _dirtree_mtime(dox_builddir) - mtime_pack = {"src": src_mtime, "dox": dox_mtime} - with open("../cached_mtimes.json", "w") as f: - json.dump(mtime_pack, f) - except OSError as e: - sys.stderr.write(f"doxygen execution failed: {e}") - - return {"cholla": dox_builddir} - -_header_files = [ - os.path.abspath(p) for p in _it_tree_paths("../../src") if p.endswith(".h") -] -breath_projects = build_doxygen() # always build doxygen docs (for local & rtd builds) -breathe_default_project = "cholla" -breathe_projects_source = {"cholla" : ( "../../src/", _header_files )}