Skip to content

Commit eabf965

Browse files
Vector Inputs (#29)
* Test ColVecs * Test RowVecs * Bump patch * Update docs * Remove redundant code * Apply formatting suggestions Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> * Simplify RowVecs implementation * Update src/bayesian_linear_regression.jl Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> * Add error test * Apply suggestions from code review Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
1 parent 0dd9735 commit eabf965

File tree

4 files changed

+241
-185
lines changed

4 files changed

+241
-185
lines changed

Project.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "BayesianLinearRegressors"
22
uuid = "f579363c-4606-5e5c-a623-c4549f609c4b"
33
authors = ["Will Tebbutt <[email protected]>"]
4-
version = "0.3.4"
4+
version = "0.3.5"
55

66
[deps]
77
AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"

README.md

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,15 @@ The interface sits at roughly the same level as that of [Distributions.jl](https
1717

1818
## Conventions
1919

20-
A `BayesianLinearRegressor` in `D` dimensions works with data where:
21-
- inputs `X` should be a `D x N` matrix of `Real`s where each column is from one data point.
22-
- outputs `y` should be an `N`-vector of `Real`s, where each element is from one data point.
20+
`BayesianLinearRegressors` is consistent with `AbstractGPs`.
21+
Consequently, a `BayesianLinearRegressor` in `D` dimensions can work with the following input types:
22+
1. `ColVecs` -- a wrapper around a `D x N` matrix of `Real`s, saying that each column should be interpreted as an input.
23+
2. `RowVecs` -- a wrapper around an `N x D` matrix of `Real`s, saying that each row should be interpreted as an input.
24+
3. `Matrix{<:Real}` -- must be `D x N`. Prefer using `ColVecs` or `RowVecs` for the sake of being explicit.
25+
26+
Consult the `Design` section of the [KernelFunctions.jl](https://juliagaussianprocesses.github.io/KernelFunctions.jl/dev/design/) docs for more info on these conventions.
27+
28+
Outputs for a BayesianLinearRegressor should be an `AbstractVector{<:Real}` of length `N`.
2329

2430
## Example Usage
2531

@@ -38,7 +44,7 @@ f = BayesianLinearRegressor(mw, Λw)
3844

3945
# Index into the regressor and assume heteroscedastic observation noise `Σ_noise`.
4046
N = 10
41-
X = collect(hcat(collect(range(-5.0, 5.0, length=N)), ones(N))')
47+
X = ColVecs(collect(hcat(collect(range(-5.0, 5.0, length=N)), ones(N))'))
4248
Σ_noise = Diagonal(exp.(randn(N)))
4349
fX = f(X, Σ_noise)
4450

@@ -70,7 +76,7 @@ logpdf(f′(X, Σ_noise), y)
7076

7177
# Sample from the posterior predictive distribution.
7278
N_plt = 1000
73-
X_plt = hcat(collect(range(-6.0, 6.0, length=N_plt)), ones(N_plt))'
79+
X_plt = ColVecs(hcat(collect(range(-6.0, 6.0, length=N_plt)), ones(N_plt))')
7480
f′X_plt = rand(rng, f′(X_plt, eps()), 100) # Samples with machine-epsilon noise for stability
7581

7682
# Compute some posterior marginal statistics.

src/bayesian_linear_regression.jl

Lines changed: 21 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,28 @@ const FiniteBLR = FiniteGP{<:BayesianLinearRegressor}
1717

1818
# All code below implements the primary + secondary AbstractGPs.jl APIs.
1919

20-
AbstractGPs.mean(fx::FiniteBLR) = fx.x.X' * fx.f.mw
20+
x_as_colvecs(fx::FiniteBLR) = x_as_colvecs(fx.x)
21+
22+
x_as_colvecs(x::ColVecs) = x
23+
24+
x_as_colvecs(x::RowVecs) = ColVecs(x.X')
25+
26+
function x_as_colvecs(x::T) where {T<:AbstractVector}
27+
return error(
28+
"$T is not a subtype of AbstractVector that is known. Please provide either a ",
29+
"ColVecs or RowVecs.",
30+
)
31+
end
32+
33+
AbstractGPs.mean(fx::FiniteBLR) = x_as_colvecs(fx).X' * fx.f.mw
2134

2235
function AbstractGPs.cov(fx::FiniteBLR)
23-
α = _cholesky(fx.f.Λw).U' \ fx.x.X
36+
α = _cholesky(fx.f.Λw).U' \ x_as_colvecs(fx).X
2437
return Symmetric(α' * α + fx.Σy)
2538
end
2639

2740
function AbstractGPs.var(fx::FiniteBLR)
28-
α = _cholesky(fx.f.Λw).U' \ fx.x.X
41+
α = _cholesky(fx.f.Λw).U' \ x_as_colvecs(fx).X
2942
return vec(sum(abs2, α; dims=1)) .+ diag(fx.Σy)
3043
end
3144

@@ -34,8 +47,9 @@ AbstractGPs.mean_and_cov(fx::FiniteBLR) = (mean(fx), cov(fx))
3447
AbstractGPs.mean_and_var(fx::FiniteBLR) = (mean(fx), var(fx))
3548

3649
function AbstractGPs.rand(rng::AbstractRNG, fx::FiniteBLR, samples::Int)
37-
w = fx.f.mw .+ _cholesky(fx.f.Λw).U \ randn(rng, size(fx.x.X, 1), samples)
38-
return fx.x.X' * w .+ _cholesky(fx.Σy).U' * randn(rng, size(fx.x.X, 2), samples)
50+
X = x_as_colvecs(fx).X
51+
w = fx.f.mw .+ _cholesky(fx.f.Λw).U \ randn(rng, size(X, 1), samples)
52+
return X' * w .+ _cholesky(fx.Σy).U' * randn(rng, size(X, 2), samples)
3953
end
4054

4155
function AbstractGPs.logpdf(fx::FiniteBLR, y::AbstractVector{<:Real})
@@ -56,9 +70,9 @@ end
5670

5771
# Computation utilised in both `logpdf` and `posterior`.
5872
function __compute_inference_quantities(fx::FiniteBLR, y::AbstractVector{<:Real})
59-
length(y) == size(fx.x.X, 2) || throw(error("length(y) != size(fx.x.X, 2)"))
73+
X = x_as_colvecs(fx).X
74+
length(y) == size(X, 2) || throw(error("length(y) != size(fx.x.X, 2)"))
6075
blr = fx.f
61-
X = fx.x.X
6276
N = length(y)
6377

6478
Uw = _cholesky(blr.Λw).U

0 commit comments

Comments
 (0)