diff --git a/Project.toml b/Project.toml index 881e9a3c8..e02d22bb5 100644 --- a/Project.toml +++ b/Project.toml @@ -34,4 +34,4 @@ julia = "1.6" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["Test"] +test = ["Test"] \ No newline at end of file diff --git a/docs/.gitignore b/docs/.gitignore deleted file mode 100644 index ba39cc531..000000000 --- a/docs/.gitignore +++ /dev/null @@ -1 +0,0 @@ -Manifest.toml diff --git a/docs/Project.toml b/docs/Project.toml index 01db6a719..6dfe81a83 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -1,6 +1,7 @@ [deps] Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" Gen = "ea4f424c-a589-11e8-07c0-fd5c91b9da4a" +Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" [compat] -Documenter = "0.27" +Documenter = "1" diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 000000000..78ddd3f69 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,24 @@ +# Website Docs +- `pages.jl` to find skeleton of website. +- `make.jl` to build the website index. + +The docs are divided in roughly four sections: +- Getting Started + Tutorials +- How-to Guides +- API = Modeling API + Inference API +- Explanations + Internals + + +# Developing +To build the docs, run `julia make.jl` or alternatively start up the Julia REPL and include `make.jl`. For debugging, consider setting `draft=true` in the `makedocs` function found in `make.jl`. +Currently you must write the tutorial directly in the docs rather than a source file (e.g. Quarto). See `getting_started` or `tutorials` for examples. + +Code snippets must use the triple backtick with a label to run. The environment carries over so long as the labels match. 
Example: + +```@example tutorial_1 +x = rand() +``` + +```@example tutorial_1 +print(x) +``` \ No newline at end of file diff --git a/docs/build_docs_locally.sh b/docs/build_docs_locally.sh deleted file mode 100755 index 40d6d9002..000000000 --- a/docs/build_docs_locally.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -# run this script from the Gen/ directory, it will generate HTML documentation under docs/build - -julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()' -julia --project=docs/ docs/make.jl diff --git a/docs/make.jl b/docs/make.jl index 9c641d022..92a7cd065 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,37 +1,20 @@ +# Run: julia --project make.jl using Documenter, Gen +include("pages.jl") makedocs( - sitename = "Gen", modules = [Gen], - pages = [ - "Home" => "index.md", - "Getting Started" => "getting_started.md", - "Tutorials" => "tutorials.md", - "Modeling Languages and APIs" => [ - "Generative Functions" => "ref/gfi.md", - "Probability Distributions" => "ref/distributions.md", - "Built-in Modeling Language" => "ref/modeling.md", - "Generative Function Combinators" => "ref/combinators.md", - "Choice Maps" => "ref/choice_maps.md", - "Selections" => "ref/selections.md", - "Optimizing Trainable Parameters" => "ref/parameter_optimization.md", - "Trace Translators" => "ref/trace_translators.md", - "Extending Gen" => "ref/extending.md" - ], - "Standard Inference Library" => [ - "Importance Sampling" => "ref/importance.md", - "MAP Optimization" => "ref/map.md", - "Markov chain Monte Carlo" => "ref/mcmc.md", - "MAP Optimization" => "ref/map.md", - "Particle Filtering" => "ref/pf.md", - "Variational Inference" => "ref/vi.md", - "Learning Generative Functions" => "ref/learning.md" - ], - "Internals" => [ - "Optimizing Trainable Parameters" => "ref/internals/parameter_optimization.md", - "Modeling Language Implementation" => "ref/internals/language_implementation.md" - ] - ] + doctest = false, + clean = true, + 
warnonly = true, + format = Documenter.HTML(; + assets = String["assets/header.js", "assets/header.css", "assets/theme.css"], + collapselevel=1, + ), + sitename = "Gen.jl", + pages = pages, + checkdocs=:exports, + pagesonly=true, ) deploydocs( diff --git a/docs/pages.jl b/docs/pages.jl new file mode 100644 index 000000000..dc340eb7e --- /dev/null +++ b/docs/pages.jl @@ -0,0 +1,52 @@ +pages = [ + "Home" => "index.md", + "Getting Started" => [ + "Example 1: Linear Regression" => "getting_started/linear_regression.md", + ], + "Tutorials" => [ + "Basics" => [ + "tutorials/basics/modeling_in_gen.md", + "tutorials/basics/gfi.md", + "tutorials/basics/combinators.md", + "tutorials/basics/particle_filter.md", + "tutorials/basics/vi.md", + ], + "Advanced" => [ + "tutorials/trace_translators.md", + ], + "Model Optimizations" => [ + "Speeding Inference with the Static Modeling Language" => "tutorials/model_optimizations/scaling_with_sml.md", + ], + ], + "How-to Guides" => [ + "MCMC Kernels" => "how_to/mcmc_kernels.md", + "Custom Distributions" => "how_to/custom_distributions.md", + "Custom Modeling Languages" => "how_to/custom_dsl.md", + "Custom Gradients" => "how_to/custom_derivatives.md", + "Incremental Computation" => "how_to/custom_incremental_computation.md", + ], + "API Reference" => [ + "Modeling Library" => [ + "Generative Functions" => "api/model/gfi.md", + "Probability Distributions" => "api/model/distributions.md", + "Choice Maps" => "api/model/choice_maps.md", + "Built-in Modeling Languages" => "api/model/modeling.md", + "Combinators" => "api/model/combinators.md", + "Selections" => "api/model/selections.md", + "Optimizing Trainable Parameters" => "api/model/parameter_optimization.md", + "Trace Translators" => "api/model/trace_translators.md", + ], + "Inference Library" => [ + "Importance Sampling" => "api/inference/importance.md", + "MAP Optimization" => "api/inference/map.md", + "Markov chain Monte Carlo" => "api/inference/mcmc.md", 
+ "Particle Filtering" => "api/inference/pf.md", + "Variational Inference" => "api/inference/vi.md", + "Learning Generative Functions" => "api/inference/learning.md" + ], + ], + "Explanation and Internals" => [ + "Modeling Language Implementation" => "explanations/language_implementation.md", + "explanations/combinator_design.md" + ] +] diff --git a/docs/src/ref/importance.md b/docs/src/api/inference/importance.md similarity index 100% rename from docs/src/ref/importance.md rename to docs/src/api/inference/importance.md diff --git a/docs/src/ref/learning.md b/docs/src/api/inference/learning.md similarity index 99% rename from docs/src/ref/learning.md rename to docs/src/api/inference/learning.md index 2192793fc..daa62eb85 100644 --- a/docs/src/ref/learning.md +++ b/docs/src/api/inference/learning.md @@ -209,7 +209,7 @@ Then, the traces of the model can be obtained by simulating from the variational Instead of fitting the variational approximation from scratch for each observation, it is possible to fit an *inference model* instead, that takes as input the observation, and generates a distribution on latent variables as output (as in the wake sleep algorithm). When we train the variational approximation by minimizing the evidence lower bound (ELBO) this is called amortized variational inference. Variational autencoders are an example. -It is possible to perform amortized variational inference using [`black_box_vi`](@ref) or [`black_box_vimco!`](@ref). +It is possible to perform amortized variational inference using [`black_box_vi!`](@ref) or [`black_box_vimco!`](@ref). 
## References diff --git a/docs/src/ref/map.md b/docs/src/api/inference/map.md similarity index 100% rename from docs/src/ref/map.md rename to docs/src/api/inference/map.md diff --git a/docs/src/api/inference/mcmc.md b/docs/src/api/inference/mcmc.md new file mode 100644 index 000000000..bd6df6d00 --- /dev/null +++ b/docs/src/api/inference/mcmc.md @@ -0,0 +1,19 @@ +# Markov chain Monte Carlo (MCMC) + +Gen supports standard Markov Chain Monte Carlo algorithms and allows users to write their own custom kernels. +```@index +Pages = ["mcmc.md"] +``` + +```@docs +metropolis_hastings +mh +mala +hmc +elliptical_slice +@pkern +@kern +@rkern +reversal +involutive_mcmc +``` diff --git a/docs/src/ref/pf.md b/docs/src/api/inference/pf.md similarity index 100% rename from docs/src/ref/pf.md rename to docs/src/api/inference/pf.md diff --git a/docs/src/api/inference/vi.md b/docs/src/api/inference/vi.md new file mode 100644 index 000000000..4d55fb43c --- /dev/null +++ b/docs/src/api/inference/vi.md @@ -0,0 +1,7 @@ +## Variational inference +There are two procedures in the inference library for performing black box variational inference. +Each of these procedures can also train the model using stochastic gradient descent, as in a variational autoencoder. +```@docs +black_box_vi! +black_box_vimco! +``` diff --git a/docs/src/ref/choice_maps.md b/docs/src/api/model/choice_maps.md similarity index 87% rename from docs/src/ref/choice_maps.md rename to docs/src/api/model/choice_maps.md index c065b1b32..b5220c2ba 100644 --- a/docs/src/ref/choice_maps.md +++ b/docs/src/api/model/choice_maps.md @@ -30,7 +30,13 @@ Choice maps also implement: - `==`, which tests if two choice maps have the same addresses and values at those addresses. 
-## Mutable Choice Maps + +```@docs +DynamicChoiceMap +EmptyChoiceMap +StaticChoiceMap +choicemap +``` A mutable choice map can be constructed with [`choicemap`](@ref), and then populated: ```julia @@ -45,8 +51,18 @@ There is also a constructor that takes initial (address, value) pairs: choices = choicemap((:x, true), ("foo", 1.25), (:y => 1 => :z, -6.3)) ``` + ```@docs -choicemap set_value! set_submap! +Base.merge(::ChoiceMap, ::ChoiceMap) +Base.merge(::ChoiceMap, ::Vararg{ChoiceMap}) +Base.isempty(::ChoiceMap) ``` + +```@docs +Gen.pair +Gen.unpair +Gen.ChoiceMapNestedView +``` + diff --git a/docs/src/ref/combinators.md b/docs/src/api/model/combinators.md similarity index 99% rename from docs/src/ref/combinators.md rename to docs/src/api/model/combinators.md index f02819763..229896523 100644 --- a/docs/src/ref/combinators.md +++ b/docs/src/api/model/combinators.md @@ -112,7 +112,9 @@ FunctionalCollections.PersistentVector{Any}[true, false, true, false, true] ## Recurse combinator -TODO: document me +```@docs +Recurse +``` ```@raw html
@@ -161,3 +163,4 @@ The resulting trace contains the subtrace from the branch with index `2` - in th │ └── :z : 13.552870875213735 ``` + diff --git a/docs/src/ref/distributions.md b/docs/src/api/model/distributions.md similarity index 98% rename from docs/src/ref/distributions.md rename to docs/src/api/model/distributions.md index f4e8ce772..d3828b59f 100644 --- a/docs/src/ref/distributions.md +++ b/docs/src/api/model/distributions.md @@ -1,4 +1,11 @@ -# Probability Distributions +# [Probability Distributions](@id distributions) + +```@docs +random +logpdf +has_output_grad +logpdf_grad +``` Gen provides a library of built-in probability distributions, and four ways of defining custom distributions, each of which are explained below: @@ -39,6 +46,7 @@ piecewise_uniform poisson uniform uniform_discrete +broadcasted_normal ``` ## [Defining New Distributions Inline with the `@dist` DSL](@id dist_dsl) diff --git a/docs/src/api/model/gfi.md b/docs/src/api/model/gfi.md new file mode 100644 index 000000000..0385988be --- /dev/null +++ b/docs/src/api/model/gfi.md @@ -0,0 +1,55 @@ +## [Generative Functions](@id gfi_api) + +```@docs +GenerativeFunction +Trace +``` + +The complete set of methods in the generative function interface (GFI) is: + +```@docs +simulate +generate +update +regenerate +get_args +get_retval +get_choices +get_score +get_gen_fn +Base.getindex +project +propose +assess +has_argument_grads +has_submap +accepts_output_grad +accumulate_param_gradients! +choice_gradients +get_params +``` + +```@docs +Diff +NoChange +UnknownChange +SetDiff +Diffed +``` + +```@docs +CustomUpdateGF +apply_with_state +update_with_state +``` + +```@docs +CustomGradientGF +apply +gradient +``` + +```@docs +Gen.init_update_state +Gen.apply_update! 
+``` \ No newline at end of file diff --git a/docs/src/ref/modeling.md b/docs/src/api/model/modeling.md similarity index 95% rename from docs/src/ref/modeling.md rename to docs/src/api/model/modeling.md index a89367fd7..2e3c319a2 100644 --- a/docs/src/ref/modeling.md +++ b/docs/src/api/model/modeling.md @@ -1,4 +1,4 @@ -# Built-in Modeling Language +# [The Dynamic Modeling Language](@id dynamic_modeling_language) Gen provides a built-in embedded modeling language for defining generative functions. The language uses a syntax that extends Julia's syntax for defining regular Julia functions, and is also referred to as the **Dynamic Modeling Language**. @@ -29,14 +29,14 @@ We can also trace its execution: ``` Optional arguments can be left out of the above operations, and default values will be filled in automatically: ```julia -julia> (trace, _) = generate(foo, (,)); +julia> (trace, _) = generate(foo, ()) julia> get_args(trace) (0.1,) ``` -See [Generative Functions](@ref) for the full set of operations supported by a generative function. +See [Generative Functions](@ref gfi_api) for the full set of operations supported by a generative function. Note that the built-in modeling language described in this section is only one of many ways of defining a generative function -- generative functions can also be constructed using other embedded languages, or by directly implementing the methods of the generative function interface. However, the built-in modeling language is intended to being flexible enough cover a wide range of use cases. -In the remainder of this section, we refer to generative functions defined using the built-in modeling language as `@gen` functions. Details about the implementation of `@gen` functions can be found in the [Modeling Language Implementation](@ref) section. +In the remainder of this section, we refer to generative functions defined using the built-in modeling language as `@gen` functions. 
Details about the implementation of `@gen` functions can be found in the [Modeling Language Implementation](@ref language-implementation) section. ## Annotations @@ -57,7 +57,7 @@ Each argument can have the following different syntactic forms: Currently, the possible argument annotations are: -- `grad` (see [Differentiable programming](@ref)). +- `grad` (see [Differentiable programming](@ref differentiable_modeling)). **Function annotations.** The `@gen` function itself can also be optionally associated with zero or more annotations, which are separate from the per-argument annotations. Function-level annotations use the following different syntactic forms: @@ -70,11 +70,11 @@ Function-level annotations use the following different syntactic forms: Currently the possible function annotations are: -- `grad` (see [Differentiable programming](@ref)). +- `grad` (see [Differentiable programming](@ref differentiable_modeling)). -- `static` (see [Static Modeling Language](@ref)). +- `static` (see [Static Modeling Language](@ref sml)). -- `nojuliacache` (see [Static Modeling Language](@ref)). +- `nojuliacache` (see [Static Modeling Language](@ref sml)). ## Making random choices @@ -82,7 +82,7 @@ Random choices are made by calling a probability distribution on some arguments: ```julia val::Bool = bernoulli(0.5) ``` -See [Probability Distributions](@ref) for the set of built-in probability distributions, and for information on implementing new probability distributions. +See [Probability Distributions](@ref distributions) for the set of built-in probability distributions, and for information on implementing new probability distributions. In the body of a `@gen` function, wrapping a call to a random choice with an `@trace` expression associates the random choice with an *address*, and evaluates to the value of the random choice. The syntax is: @@ -145,7 +145,7 @@ It is recommended to write disciplined generative functions when possible. 
**Untraced call**: If `foo` is a generative function, we can invoke `foo` from within the body of a `@gen` function using regular call syntax. -The random choices made within the call are not given addresses in our trace, and are therefore *untraced* random choices (see [Generative Function Interface](@ref) for details on untraced random choices). +The random choices made within the call are not given addresses in our trace, and are therefore *untraced* random choices (see [Generative Function Interface](@ref gfi) for details on untraced random choices). ```julia val = foo(0.5) ``` @@ -247,10 +247,10 @@ Note that `~` is also defined in `Base` as a unary operator that performs the bi Like regular Julia functions, `@gen` functions return either the expression used in a `return` keyword, or by evaluating the last expression in the function body. Note that the return value of a `@gen` function is different from a trace of `@gen` function, which contains the return value associated with an execution as well as the assignment to each random choice made during the execution. -See [Generative Function Interface](@ref) for more information about traces. +See [Generative Function Interface](@ref gfi) for more information about traces. -## Trainable parameters +## [Trainable Parameters](@id trainable_parameters_modeling) A `@gen` function may begin with an optional block of *trainable parameter declarations*. The block consists of a sequence of statements, beginning with `@param`, that declare the name and Julia type for each trainable parameter. @@ -274,14 +274,17 @@ The following methods are exported for the trainable parameters of `@gen` functi init_param! get_param get_param_grad +set_param_grad! set_param! zero_param_grad! +accumulate_param_gradients_determ! +gradient_with_state ``` Trainable parameters are designed to be trained using gradient-based methods. This is discussed in the next section. 
-## Differentiable programming +## [Differentiable Programming](@id differentiable_modeling) Given a trace of a `@gen` function, Gen supports automatic differentiation of the log probability (density) of all of the random choices made in the trace with respect to the following types of inputs: @@ -371,7 +374,7 @@ See [ReverseDiff](https://github.com/JuliaDiff/ReverseDiff.jl) for more details. When making a random choice, each argument is either a tracked value or not. If the argument is a tracked value, then the probability distribution must support differentiation of the log probability (density) with respect to that argument. Otherwise, an error is thrown. -The [`has_argument_grads`](@ref) function indicates which arguments support differentiation for a given distribution (see [Probability Distributions](@ref)). +The [`has_argument_grads`](@ref) function indicates which arguments support differentiation for a given distribution (see [Probability Distributions](@ref distributions)). If the gradient is required for the *value* of a random choice, the distribution must support differentiation of the log probability (density) with respect to the value. This is indicated by the [`has_output_grad`](@ref) function. @@ -381,7 +384,7 @@ It is an error if a tracked value is passed as an argument of a generative funct If a generative function `gen_fn` has `accepts_output_grad(gen_fn) = true`, then the return value of the generative function call will be tracked and will propagate further through the caller `@gen` function's computation. -## Static Modeling Language +## [Static Modeling Language](@id sml) The *static modeling language* is a restricted variant of the built-in modeling language. 
Models written in the static modeling language can result in better inference performance (more inference operations per second and less memory consumption), than the full built-in modeling language, especially for models used with iterative inference algorithms like Markov chain Monte Carlo. @@ -399,7 +402,7 @@ end ``` After running this code, `foo` is a Julia value whose type is a subtype of `StaticIRGenerativeFunction`, which is a subtype of [`GenerativeFunction`](@ref). -### Static computation graph +### Static Computation Graphs Using the `static` annotation instructs Gen to statically construct a directed acyclic graph for the computation represented by the body of the function. For the function `foo` above, the static graph looks like: ```@raw html @@ -431,7 +434,7 @@ First, the definition of a `(static)` generative function is always expected to Next, in order to be able to construct the static graph, Gen restricts the permitted syntax that can be used in functions annotated with `static`. In particular, each statement in the body must be one of the following: -- A `@param` statement specifying any [Trainable parameters](@ref), e.g.: +- A `@param` statement specifying any [trainable parameters](@ref trainable_parameters_modeling), e.g.: ```julia @param theta::Float64 diff --git a/docs/src/ref/parameter_optimization.md b/docs/src/api/model/parameter_optimization.md similarity index 96% rename from docs/src/ref/parameter_optimization.md rename to docs/src/api/model/parameter_optimization.md index 60e05f41d..50f4b323c 100644 --- a/docs/src/ref/parameter_optimization.md +++ b/docs/src/api/model/parameter_optimization.md @@ -1,4 +1,4 @@ -# Optimizing Trainable Parameters +# [Trainable Parameters](@id trainable_parameter_optimization) Trainable parameters of generative functions are initialized differently depending on the type of generative function. Trainable parameters of the built-in modeling language are initialized with [`init_param!`](@ref). 
@@ -31,3 +31,4 @@ GradientDescent ADAM ``` For adding new types of update configurations, see [Optimizing Trainable Parameters (Internal)](@ref optimizing-internal). + diff --git a/docs/src/ref/selections.md b/docs/src/api/model/selections.md similarity index 98% rename from docs/src/ref/selections.md rename to docs/src/api/model/selections.md index 152f98b61..86b746bbc 100644 --- a/docs/src/ref/selections.md +++ b/docs/src/api/model/selections.md @@ -23,7 +23,6 @@ If we use this selection in the context of a trace of the function `bar` below, @trace(normal(0, 1), :z) @trace(normal(0, 1), :w) end -end @gen function bar() @trace(bernoulli(0.5), :x) @@ -57,3 +56,6 @@ DynamicSelection StaticSelection ComplementSelection ``` +```@docs +Gen.get_address_schema +``` diff --git a/docs/src/api/model/trace_translators.md b/docs/src/api/model/trace_translators.md new file mode 100644 index 000000000..14de30ec9 --- /dev/null +++ b/docs/src/api/model/trace_translators.md @@ -0,0 +1,20 @@ +## Trace Translators + +```@docs +@transform +@read +@write +@copy +@tcall +pair_bijections! +is_involution! 
+inverse +TraceTranslator +DeterministicTraceTranslator +GeneralTraceTranslator +SimpleExtendingTraceTranslator +SymmetricTraceTranslator +``` +```@docs +TraceTransformDSLProgram +``` diff --git a/docs/src/assets/header.css b/docs/src/assets/header.css new file mode 100644 index 000000000..155b10a3b --- /dev/null +++ b/docs/src/assets/header.css @@ -0,0 +1,138 @@ + +@media all and (max-width: 560px) { + header.navigation { + position: fixed !important; + left:0; + top: 0; + width: 100%; + } + + header.navigation div.container { + margin-left: 0rem; + } + + header.navigation div.container nav.navbar { + min-height: 1rem !important; + } + + header.navigation div.container nav.navbar ul.navbar-nav { + min-height: 1rem !important; + margin-left: 0.5rem !important; + } + + header.navigation div.container nav.navbar ul.navbar-nav li.small-item { + visibility: visible !important; + display: block !important; + margin: 0.5rem; + } + + header.navigation div.container nav.navbar ul.navbar-nav li.nav-item { + visibility: hidden; + display: none; + } + + header.navigation div.container nav.navbar ul.navbar-nav li.nav-item a { + visibility: hidden; + display: none; + } + + html:not(.theme--documenter-dark) body #documenter .docs-main { + margin-top: 2rem !important; + } +} + +@media all and (max-width: 1055px) and (min-width: 561px){ + header.navigation { + position: fixed !important; + left:0; + top: 0; + width: 100%; + } + + header.navigation div.container { + margin-left: 0rem; + } + + header.navigation div.container nav.navbar ul.navbar-nav { + width: 80% !important; + } +} + +@media all and (min-width: 1056px) { + header.navigation { + position: fixed !important; + left:0; + top: 0; + width: 100%; + } + + header.navigation div.container { + margin-left: 18rem; + } + +} + +html.theme--documenter-dark header.navigation { + background-color: #1f2424 !important; +} + +html.theme--documenter-dark header.navigation div.container { + border-bottom: 1px solid #5e6d6f; +} + 
+html.theme--documenter-dark header.navigation div.container nav.navbar { + background-color: #1f2424 !important; +} + +html.theme--documenter-dark header.navigation div.container nav.navbar ul.navbar-nav li.nav-item a.nav-link { + color: white; + transition: color 100ms; +} + +html.theme--documenter-dark header.navigation div.container nav.navbar ul.navbar-nav li.nav-item a.nav-link:hover { + color: #0aa8a7 +} + +html header.navigation { + background-color: white !important; +} + +html header.navigation div.container { + border-bottom: 1px solid #dbdbdb; +} + +html header.navigation div.container nav.navbar ul.navbar-nav li.nav-item a.nav-link { + color: #222; + transition: color 100ms; +} + +html header.navigation div.container nav.navbar ul.navbar-nav li.nav-item a.nav-link:hover { + color: #0aa8a7 +} + +header.navigation { + z-index: 3; +} + +header.navigation div.container nav.navbar ul.navbar-nav { + margin-left: 4rem; + min-height: 3.25rem; + width: 70%; + display: flex; + align-self: auto; + flex-direction: row; + justify-content: space-around; +} + +header.navigation div.container nav.navbar ul.navbar-nav li.nav-item { + align-self: stretch; + align-content: space-around; + justify-content: center; + display: flex; + flex-direction: column; +} + +header.navigation div.container nav.navbar ul.navbar-nav li.small-item { + visibility: hidden; + display: none; +} \ No newline at end of file diff --git a/docs/src/assets/header.js b/docs/src/assets/header.js new file mode 100644 index 000000000..2f211b385 --- /dev/null +++ b/docs/src/assets/header.js @@ -0,0 +1,98 @@ +// Source: +// https://github.com/ReactiveBayes/RxInfer.jl/blob/246b196a3ea29d0b5744ce241a923c7a3b30eaf4/docs/src/assets/header.js#L4 + +// We add a simple `onload` hook to inject the custom header for our `HTML`-generated pages +window.onload = function() { + //