
Commit 4302741

Run JuliaFormatter and add CI (#27)
* Run JuliaFormatter and add CI
* Fix some URLs

Co-authored-by: willtebbutt <[email protected]>
1 parent 0d51fca commit 4302741

6 files changed: +72 additions, −58 deletions


.JuliaFormatter.toml

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+style = "blue"
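The `style = "blue"` entry makes JuliaFormatter apply the BlueStyle conventions, which is what produces the layout changes in the files below. As a minimal local sketch, assuming JuliaFormatter is installed and run from the repository root (where it picks up this config file):

using JuliaFormatter

# Reads style = "blue" from .JuliaFormatter.toml and rewrites files in place.
format("."; verbose=true)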

.github/workflows/Format.yml

Lines changed: 20 additions & 0 deletions
@@ -0,0 +1,20 @@
+name: Format suggestions
+
+on:
+  pull_request:
+
+jobs:
+  format:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: julia-actions/setup-julia@latest
+        with:
+          version: 1
+      - run: |
+          julia -e 'using Pkg; Pkg.add("JuliaFormatter")'
+          julia -e 'using JuliaFormatter; format("."; verbose=true)'
+      - uses: reviewdog/action-suggester@v1
+        with:
+          tool_name: JuliaFormatter
+          fail_on_error: true
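On every pull request this workflow reformats the checkout and then has reviewdog/action-suggester post the resulting diff as inline review suggestions, with fail_on_error: true failing the check when it finds anything. A hedged local equivalent of the run step, useful for checking a branch before pushing; the Bool return of format signalling "already formatted" is JuliaFormatter's documented behaviour, not something shown in this diff:

using Pkg
Pkg.add("JuliaFormatter")

using JuliaFormatter
# format(...) rewrites files in place and returns true when nothing changed,
# so it can double as a pre-push check.
already_formatted = format("."; verbose=true)
already_formatted || @warn "Files were reformatted; review and commit the changes."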

docs/make.jl

Lines changed: 3 additions & 7 deletions
@@ -3,15 +3,11 @@ using Documenter, BayesianLinearRegressors
 makedocs(;
     modules=[BayesianLinearRegressors],
     format=Documenter.HTML(),
-    pages=[
-        "Home" => "index.md",
-    ],
-    repo="https://github.com/willtebbutt/BayesianLinearRegressors.jl/blob/{commit}{path}#L{line}",
+    pages=["Home" => "index.md"],
+    repo="https://github.com/JuliaGaussianProcesses/BayesianLinearRegressors.jl/blob/{commit}{path}#L{line}",
     sitename="BayesianLinearRegressors.jl",
     authors="Will Tebbutt <[email protected]>",
     assets=String[],
 )

-deploydocs(;
-    repo="github.com/willtebbutt/BayesianLinearRegressors.jl",
-)
+deploydocs(; repo="github.com/JuliaGaussianProcesses/BayesianLinearRegressors.jl")

examples/nn-blr.jl

Lines changed: 11 additions & 18 deletions
@@ -15,10 +15,7 @@ using Statistics: mean, std
 # Create an MLP that you might have seen in the 90s.
 Dlat = 50;
 W1, b1 = randn(Dlat, 1), randn(Dlat);
-ϕ = Chain(
-    x->reshape(x, 1, :),
-    x->tanh.(W1 * x .+ b1),
-)
+ϕ = Chain(x -> reshape(x, 1, :), x -> tanh.(W1 * x .+ b1))

 # Initialise the standard deviation of the observation noise. We will learn this.
 logσ = [log(1)]
@@ -34,7 +31,7 @@ blr = BayesianLinearRegressor(zeros(Dlat), Matrix{Float64}(I, Dlat, Dlat))
 function nn_blr_training_loop(x, y, pars, Nitr, opt)
     tr_nlml = Vector{Float64}(undef, Nitr)
     p = ProgressMeter.Progress(Nitr)
-    for itr in 1:Nitr
+    for itr in 1:Nitr
         nlml, back = Zygote.forward(Zygote.Params(pars)) do
             -logpdf(blr(ϕ(x), exp(2 * logσ[1])), y)
         end
@@ -44,7 +41,7 @@ function nn_blr_training_loop(x, y, pars, Nitr, opt)
             Flux.Optimise.update!(opt, par, g[par])
         end
         showvalues = [(:itr, itr), (:nlml, nlml), (:σ_ε, exp(logσ[1]))]
-        ProgressMeter.next!(p; showvalues = showvalues)
+        ProgressMeter.next!(p; showvalues=showvalues)
     end
     return tr_nlml
 end
@@ -71,12 +68,15 @@ plt = plot();

 # Plot the true function with aleatoric uncertainty due to observation noise.
 plot!(plt, xte, sin.(xte); linecolor="purple", linewidth=1.5, label="sin");
-plot!(plt, xte, sin.(xte) .+ 0.3; linecolor="purple", linewidth=1.5, label="", );
+plot!(plt, xte, sin.(xte) .+ 0.3; linecolor="purple", linewidth=1.5, label="");
 plot!(plt, xte, sin.(xte) .- 0.3; linecolor="purple", linewidth=1.5, label="")

 # Visualise the posterior marginal uncertainty via 3σ error bars.
 m_te, σ_te = mean.(ypr_te), std.(ypr_te)
-plot!(plt, xte, [m_te m_te];
+plot!(
+    plt,
+    xte,
+    [m_te m_te];
     label="",
     fillrange=[m_te .+ 3 .* σ_te, m_te .- 3 .* σ_te],
     linewidth=0,
@@ -85,22 +85,15 @@ plot!(plt, xte, [m_te m_te];
 );

 # Visualise the posterior distribution over the latent function via samples.
-plot!(plt, xte, y_samples_te[:, 1];
-    linecolor="blue",
-    linewidth=0.1,
-    label="Posterior Samples",
-);
-plot!(plt, xte, y_samples_te;
-    linecolor="blue",
-    linewidth=0.1,
-    label="",
+plot!(
+    plt, xte, y_samples_te[:, 1]; linecolor="blue", linewidth=0.1, label="Posterior Samples"
 );
+plot!(plt, xte, y_samples_te; linecolor="blue", linewidth=0.1, label="");

 # Plot the data.
 scatter!(plt, x, y; markercolor="red", label="y", markersize=0.1);
 display(plt);

-
 # Compute and display residuals
 ypr = marginals(blr′(ϕ(x), exp(2 * logσ[1])));
 ε = y .- mean.(ypr);
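This example trains the MLP features ϕ and the noise scale logσ jointly by minimising the BLR's negative log marginal likelihood, then predicts through the posterior regressor blr′. As a short recap of the prediction path, using only names that appear in the diff above:

# Posterior marginals at the test inputs; each element is a Normal.
ypr_te = marginals(blr′(ϕ(xte), exp(2 * logσ[1])))
m_te, σ_te = mean.(ypr_te), std.(ypr_te)  # per-point predictive mean and std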

src/bayesian_linear_regression.jl

Lines changed: 11 additions & 5 deletions
@@ -8,7 +8,7 @@ f(x) = dot(x, w)
 ```
 where `mw` and `Λw` are the mean and precision of `w`, respectively.
 """
-struct BayesianLinearRegressor{Tmw<:AbstractVector, TΛw<:AbstractMatrix} <: AbstractGP
+struct BayesianLinearRegressor{Tmw<:AbstractVector,TΛw<:AbstractMatrix} <: AbstractGP
     mw::Tmw
     Λw::TΛw
 end
@@ -35,7 +35,7 @@ AbstractGPs.mean_and_var(fx::FiniteBLR) = (mean(fx), var(fx))

 function AbstractGPs.rand(rng::AbstractRNG, fx::FiniteBLR, samples::Int)
     w = fx.f.mw .+ _cholesky(fx.f.Λw).U \ randn(rng, size(fx.x.X, 1), samples)
-    y = fx.x.X' * w .+ _cholesky(fx.Σy).U' * randn(rng, size(fx.x.X, 2), samples)
+    return fx.x.X' * w .+ _cholesky(fx.Σy).U' * randn(rng, size(fx.x.X, 2), samples)
 end

 function AbstractGPs.logpdf(fx::FiniteBLR, y::AbstractVector{<:Real})
@@ -84,7 +84,11 @@ end
 (s::BLRFunctionSample)(X::ColVecs) = X.X's.w
 (s::BLRFunctionSample)(X::RowVecs) = X.X * s.w

-Random.Sampler(::Type{<:AbstractRNG}, blr::BayesianLinearRegressor, ::Random.Repetition) = blr
+function Random.Sampler(
+    ::Type{<:AbstractRNG}, blr::BayesianLinearRegressor, ::Random.Repetition
+)
+    return blr
+end

 function Random.rand(rng::AbstractRNG, blr::BayesianLinearRegressor)
     w = blr.mw .+ _cholesky(blr.Λw).U \ randn(rng, size(blr.mw))
@@ -97,10 +101,12 @@ function Random.rand(rng::AbstractRNG, blr::BayesianLinearRegressor, dims::Dims)
     return reshape(bs, dims)
 end

-function Random.rand!(rng::AbstractRNG, A::AbstractArray{<:BLRFunctionSample}, blr::BayesianLinearRegressor)
+function Random.rand!(
+    rng::AbstractRNG, A::AbstractArray{<:BLRFunctionSample}, blr::BayesianLinearRegressor
+)
     ws = blr.mw .+ _cholesky(blr.Λw).U \ randn(rng, (only(size(blr.mw)), prod(size(A))))
     for i in LinearIndices(A)
-        @inbounds A[i] = BLRFunctionSample(ws[:,i])
+        @inbounds A[i] = BLRFunctionSample(ws[:, i])
     end
     return A
 end
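Together, the Random.Sampler method and the rand/rand! methods shown here let a BayesianLinearRegressor plug into Julia's standard Random API, with each draw being a BLRFunctionSample that can be evaluated at inputs. A minimal usage sketch under the types in this diff; the variable names are illustrative, and ColVecs is the column-wise input wrapper from AbstractGPs used above:

using BayesianLinearRegressors, LinearAlgebra, Random
using AbstractGPs: ColVecs

rng = MersenneTwister(0)
blr = BayesianLinearRegressor(zeros(2), Matrix{Float64}(I, 2, 2))

g = rand(rng, blr)           # one function sample from the prior over w
g(ColVecs(randn(2, 5)))      # evaluate it at 5 two-dimensional inputs
gs = rand(rng, blr, (3, 4))  # a 3×4 array of samples, via the Sampler above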

test/bayesian_linear_regression.jl

Lines changed: 26 additions & 28 deletions
@@ -13,7 +13,7 @@ end
     X, f, Σy = generate_toy_problem(rng, N, D)

     AbstractGPs.TestUtils.test_finitegp_primary_and_secondary_public_interface(
-        rng, f(X, Σy),
+        rng, f(X, Σy)
     )
 end
 @testset "rand" begin
@@ -24,8 +24,8 @@ end
     Y = rand(rng, f(X, Σy), samples)
     m_empirical = mean(Y; dims=2)
     Σ_empirical = (Y .- mean(Y; dims=2)) * (Y .- mean(Y; dims=2))' ./ samples
-    @test mean(f(X, Σy)) ≈ m_empirical atol=1e-3 rtol=1e-3
-    @test cov(f(X, Σy)) ≈ Σ_empirical atol=1e-3 rtol=1e-3
+    @test mean(f(X, Σy)) ≈ m_empirical atol = 1e-3 rtol = 1e-3
+    @test cov(f(X, Σy)) ≈ Σ_empirical atol = 1e-3 rtol = 1e-3

     @testset "Zygote (everything dense)" begin
         function rand_blr(X, A_Σy, mw, A_Λw)
@@ -45,10 +45,10 @@ end

         # Verify adjoints via finite differencing.
         fdm = central_fdm(5, 1)
-        @test dX ≈ first(j′vp(fdm, X->rand_blr(X, A_Σy, mw, A_Λw), z̄, X))
-        @test dA_Σy ≈ first(j′vp(fdm, A_Σy->rand_blr(X, A_Σy, mw, A_Λw), z̄, A_Σy))
-        @test dmw ≈ first(j′vp(fdm, mw->rand_blr(X, A_Σy, mw, A_Λw), z̄, mw))
-        @test dA_Λw ≈ first(j′vp(fdm, A_Λw->rand_blr(X, A_Σy, mw, A_Λw), z̄, A_Λw))
+        @test dX ≈ first(j′vp(fdm, X -> rand_blr(X, A_Σy, mw, A_Λw), z̄, X))
+        @test dA_Σy ≈ first(j′vp(fdm, A_Σy -> rand_blr(X, A_Σy, mw, A_Λw), z̄, A_Σy))
+        @test dmw ≈ first(j′vp(fdm, mw -> rand_blr(X, A_Σy, mw, A_Λw), z̄, mw))
+        @test dA_Λw ≈ first(j′vp(fdm, A_Λw -> rand_blr(X, A_Σy, mw, A_Λw), z̄, A_Λw))
     end
 end
 @testset "logpdf" begin
@@ -79,11 +79,13 @@ end

         # Check correctness via finite differencing.
         fdm = central_fdm(5, 1)
-        @test dX ≈ first(j′vp(fdm, X->logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, X))
-        @test dA_Σy ≈ first(j′vp(fdm, A_Σy->logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, A_Σy))
-        @test dy ≈ first(j′vp(fdm, y->logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, y))
-        @test dmw ≈ first(j′vp(fdm, mw->logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, mw))
-        @test dA_Λw ≈ first(j′vp(fdm, A_Λw->logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, A_Λw))
+        @test dX ≈ first(j′vp(fdm, X -> logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, X))
+        @test dA_Σy ≈
+            first(j′vp(fdm, A_Σy -> logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, A_Σy))
+        @test dy ≈ first(j′vp(fdm, y -> logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, y))
+        @test dmw ≈ first(j′vp(fdm, mw -> logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, mw))
+        @test dA_Λw ≈
+            first(j′vp(fdm, A_Λw -> logpdf_blr(X, A_Σy, y, mw, A_Λw), z̄, A_Λw))
     end
 end
 @testset "posterior" begin
@@ -105,14 +107,11 @@ end
     # Chop up the noise because we can't condition on noise that's correlated
     # between things.
     N1 = N - 3
-    Σ1, Σ2 = Σy[1:N1, 1:N1], Σy[N1+1:end, N1+1:end]
-    Σy′ = vcat(
-        hcat(Σ1, zeros(N1, N - N1)),
-        hcat(zeros(N - N1, N1), Σ2),
-    )
+    Σ1, Σ2 = Σy[1:N1, 1:N1], Σy[(N1 + 1):end, (N1 + 1):end]
+    Σy′ = vcat(hcat(Σ1, zeros(N1, N - N1)), hcat(zeros(N - N1, N1), Σ2))

-    X1, X2 = X[:, 1:N1], X[:, N1+1:end]
-    y1, y2 = y[1:N1], y[N1+1:end]
+    X1, X2 = X[:, 1:N1], X[:, (N1 + 1):end]
+    y1, y2 = y[1:N1], y[(N1 + 1):end]

     f′1 = posterior(f(X1, Σ1), y1)
     f′2 = posterior(f′1(X2, Σ2), y2)
@@ -135,7 +134,8 @@ end
     @test g(X) == g(Xr)

     # test the Random interface
-    @test rand(rng, Random.Sampler(rng, f, Val(Inf))) isa BayesianLinearRegressors.BLRFunctionSample
+    @test rand(rng, Random.Sampler(rng, f, Val(Inf))) isa
+        BayesianLinearRegressors.BLRFunctionSample

     samples1, samples2 = 10_000, 1000
     samples = samples1 * samples2
@@ -145,23 +145,21 @@ end
     # test statistical properties of the sampled functions
     let
         Y = reduce(hcat, map(h -> h(X), reshape(gs, :)))
-        m_empirical = mean(Y; dims = 2)
-        Σ_empirical = (Y .- mean(Y; dims = 2)) * (Y .- mean(Y; dims = 2))' ./ samples
+        m_empirical = mean(Y; dims=2)
+        Σ_empirical = (Y .- mean(Y; dims=2)) * (Y .- mean(Y; dims=2))' ./ samples
         @test mean(f(X, Σy)) ≈ m_empirical atol = 1e-3 rtol = 1e-3
         @test cov(f(X, Σy)) ≈ Σ_empirical + Σy atol = 1e-3 rtol = 1e-3
     end

     # test statistical properties of in-place rand
     let
         A = Array{BayesianLinearRegressors.BLRFunctionSample,2}(
-            undef,
-            samples1,
-            samples2,
+            undef, samples1, samples2
         )
         A = rand!(rng, A, f)
         Y = reduce(hcat, map(h -> h(X), reshape(gs, :)))
-        m_empirical = mean(Y; dims = 2)
-        Σ_empirical = (Y .- mean(Y; dims = 2)) * (Y .- mean(Y; dims = 2))' ./ samples
+        m_empirical = mean(Y; dims=2)
+        Σ_empirical = (Y .- mean(Y; dims=2)) * (Y .- mean(Y; dims=2))' ./ samples
         @test mean(f(X, Σy)) ≈ m_empirical atol = 1e-3 rtol = 1e-3
         @test cov(f(X, Σy)) ≈ Σ_empirical + Σy atol = 1e-3 rtol = 1e-3
     end
@@ -185,7 +183,7 @@ end
     @test dX ≈ first(j′vp(fdm, X -> sample_function(X, mw, A_Λw), z̄, X))
     @test dmw ≈ first(j′vp(fdm, mw -> sample_function(X, mw, A_Λw), z̄, mw))
     @test dA_Λw ≈
-        first(j′vp(fdm, A_Λw -> sample_function(X, mw, A_Λw), z̄, A_Λw))
+        first(j′vp(fdm, A_Λw -> sample_function(X, mw, A_Λw), z̄, A_Λw))
 end

 function rand_funcs_single(X, mw, A_Λw)
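A recurring change in this file is atol=1e-3 becoming atol = 1e-3 while ordinary keyword arguments lose their spaces. The asymmetry is deliberate on BlueStyle's part: in `@test a ≈ b atol = 1e-3` the trailing term parses as an assignment expression that the `@test` macro forwards to `isapprox` as a keyword argument, so the formatter spaces it like an assignment rather than a call-site keyword. A self-contained illustration:

using Test

# Equivalent to isapprox(1.0, 1.0005; atol=1e-3); both spellings parse the
# same way, BlueStyle just standardises on the spaced form inside @test.
@test 1.0 ≈ 1.0005 atol = 1e-3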
