Taking the snippet, my immediate reaction is to for-loop most of it and create accessible Symbols. The example code below should be treated as pseudo-code. I need similar code within the next quarter of a year, too, so I will surely think about it. I still have to get up to speed on RxInfer, though, so I cannot say how to apply clever meta/specification tricks yet.

```julia
@model function VAR(y, priors)
    # `priors` is keyed by numbered Symbols such as :x1, :γ1, :θ1, ...
    c = [zeros(size(priors[Symbol(:x, i)])) for i in 1:5]
    b = [zeros(length(y[1])) for _ in 1:5]
    for i in 1:5
        c[i][1] = 1.0
        b[i][i] = 1.0
    end
    γ = [priors[Symbol(:γ, i)] for i in 1:5]
    θ = [priors[Symbol(:θ, i)] for i in 1:5]
    x_prev = [priors[Symbol(:x, i)] for i in 1:5]
    # pseudo-code: tie each latent variable to its prior
    γ_samples = [γ[i] ~ γ[i] for i in 1:5]
    θ_samples = [θ[i] ~ θ[i] for i in 1:5]
    x_prev_samples = [x_prev[i] ~ x_prev[i] for i in 1:5]
    for i in eachindex(y)
        # pseudo-code: the comprehension and the `~` loop below express the same
        # intent, one AR transition per process
        x = [AR(x_prev[j], θ_samples[j], γ_samples[j]) for j in 1:5]
        for j in 1:5
            x[j] ~ AR(x_prev[j], θ_samples[j], γ_samples[j])
        end
        μ = sum(b[j] * dot(c[j], x[j]) for j in 1:5)
        y[i] ~ MvNormal(μ = μ, Λ = diageye(5))
        x_prev = x
    end
end
```
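Since the pseudo-code leans on numbered Symbols, here is a minimal sketch of how such a `priors` container could be built (the dictionary and the prior sizes are illustrative, not from the original post):

```julia
using RxInfer

# Build prior containers keyed by numbered Symbols, e.g. :x1, :γ1, :θ1, ...
priors = Dict{Symbol, Any}()
for i in 1:5
    priors[Symbol(:x, i)] = MvNormalMeanPrecision(zeros(4), diageye(4))
    priors[Symbol(:γ, i)] = GammaShapeRate(1.0, 1.0)
    priors[Symbol(:θ, i)] = MvNormalMeanPrecision(zeros(4), diageye(4))
end

@assert priors[Symbol(:x, 3)] === priors[:x3]  # Symbol(:x, 3) == :x3
```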
---
Nice model @albertpod, here is my attempt to simplify it. First, let's define auxiliary functions for your priors and the observation selectors:

```julia
function form_priors(orders)
    priors = (x = [], γ = [], θ = [])
    for k in 1:length(orders)
        push!(priors[:γ], GammaShapeRate(1.0, 1.0))
        push!(priors[:x], MvNormalMeanPrecision(zeros(orders[k]), diageye(orders[k])))
        push!(priors[:θ], MvNormalMeanPrecision(zeros(orders[k]), diageye(orders[k])))
    end
    return priors
end
```
```julia
function form_c_b(y, orders)
    c = Any[]
    b = Any[]
    for k in 1:length(orders)
        _c = ReactiveMP.ar_unit(Multivariate, orders[k])
        _b = zeros(length(y[1])); _b[k] = 1.0
        push!(c, _c)
        push!(b, _b)
    end
    return c, b
end
```
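For a quick sanity check, here is a minimal usage sketch of these helpers (the `orders` and `y_example` values are made up for illustration):

```julia
using RxInfer  # provides GammaShapeRate, MvNormalMeanPrecision, diageye and ReactiveMP

orders    = [2, 3]       # example: two latent processes, of order 2 and 3
y_example = [zeros(2)]   # form_c_b only looks at length(y[1])

priors = form_priors(orders)
c, b   = form_c_b(y_example, orders)

@assert length(priors[:x]) == 2   # one prior per process, sized by its order
@assert b[1] == [1.0, 0.0]        # b[k] routes process k to output component k
```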
Next, we define a sub-model for a single AR process:

```julia
@model function AR_sequence(x, index, length, priors, order)
    γ ~ priors[:γ][index]
    θ ~ priors[:θ][index]
    x_prev ~ priors[:x][index]
    for i in 1:length
        x[index, i] ~ AR(x_prev, θ, γ) where {
            meta = ARMeta(Multivariate, order, ARsafe())
        }
        x_prev = x[index, i]
    end
end
```
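For context, the multivariate `AR` node ties each state to the previous one in the standard AR companion (state-space) form, roughly (a sketch; see the ReactiveMP docs for the exact parametrization):

$$x_t \sim \mathcal{N}\big(A(\theta)\,x_{t-1},\ V(\gamma)\big)$$

where $A(\theta)$ is the companion matrix with $\theta^\top$ in its first row and a shifted identity below, and $V(\gamma)$ carries the noise variance $\gamma^{-1}$ in its top-left entry, so only the first state component is truly stochastic while the rest shift the history.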
Next, we define a tricky recursive `dot_sequence` sub-model, since the sum over all component contributions has to be built up pairwise:

```julia
@model function dot_sequence(out, k, i, orders, x, c, b)
    if k === length(orders)
        out ~ b[k] * dot(c[k], x[k, i])
    else
        next ~ dot_sequence(k = k + 1, i = i, orders = orders, x = x, c = c, b = b)
        out ~ b[k] * dot(c[k], x[k, i]) + next
    end
end
```
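To see what the recursion builds, here is a plain-Julia analogue (just a sketch for intuition, not a model):

```julia
using LinearAlgebra: dot

# Mirrors dot_sequence: called with k = 1 it computes
# b[1]*dot(c[1], x[1, i]) + b[2]*dot(c[2], x[2, i]) + ... + b[K]*dot(c[K], x[K, i])
function dot_sequence_plain(k, i, orders, x, c, b)
    k == length(orders) && return b[k] * dot(c[k], x[k, i])
    return b[k] * dot(c[k], x[k, i]) + dot_sequence_plain(k + 1, i, orders, x, c, b)
end
```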
And here is our final model spec:

```julia
@model function VAR(y, orders)
    priors = form_priors(orders)
    c, b = form_c_b(y, orders)
    y_length = length(y)
    local x # `x` is initialized in the loop within the submodels
    for k in 1:length(orders)
        x ~ AR_sequence(index = k, length = y_length, priors = priors, order = orders[k])
    end
    for i in 1:y_length
        μ[i] ~ dot_sequence(k = 1, i = i, orders = orders, x = x, c = c, b = b)
        y[i] ~ MvNormal(μ = μ[i], Λ = 10 * diageye(length(orders)))
    end
end
```

Neat? :D Next challenge is to define constraints, but thanks to @wouterwln it's pretty easy:

```julia
@constraints function var_constraints()
    for q in AR_sequence
        # This requires a patch in GraphPPL though, see https://github.com/ReactiveBayes/GraphPPL.jl/issues/262
        # A workaround is to use `constraints = MeanField()` in the `infer` function and initialize `q(x)` instead of `μ(x)`
        q(x, x_prev, γ, θ) = q(x, x_prev)q(γ)q(θ)
    end
end
```
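As the comment inside the constraints block says, until that GraphPPL issue is resolved the workaround is to skip the custom constraints entirely. A hedged sketch of what that would look like (the remaining arguments mirror the `infer` call further below):

```julia
# Workaround sketch: full mean-field factorization instead of var_constraints(),
# with q(x) initialized in the initialization block instead of μ(x)
mresult = infer(
    model          = VAR(orders = orders),
    data           = (y = observations, ),
    constraints    = MeanField(),
    initialization = var_init(orders),
    iterations     = 50,
)
```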
Final challenge is initialization:

```julia
@initialization function var_init(orders)
    # This is a problem still
    for init in AR_sequence
        q(γ) = GammaShapeRate(1.0, 1.0)
        q(θ) = MvNormalMeanPrecision(zeros(orders[1]), diageye(orders[1])) # `orders[1]` is sad... needs to be fixed
    end
    μ(x) = MvNormalMeanPrecision(zeros(orders[1]), diageye(orders[1]))
end
```

which unfortunately doesn't allow me to initialize for different orders, so in my code I used `orders = [4, 4, 4, ...]`. At last:
```julia
mresult = infer(
    model = VAR(orders = orders),
    data = (y = observations, ),
    constraints = var_constraints(),
    initialization = var_init(orders),
    returnvars = KeepLast(),
    options = (limit_stack_depth = 100, ),
    showprogress = true,
    iterations = 50,
    free_energy = false,
)
```

which executed without errors. The remaining issue is the initialization, which probably can be circumvented with the `MeanField()` workaround mentioned above.

Full script:

```julia
using Revise
using RxInfer, Random, Plots
# ===== Data Generation Section =====
function generate_ar_process(order, θ, n_samples; σ²=1.0)
    x = zeros(n_samples)
    # Initialize with random noise
    x[1:order] = randn(order) * sqrt(σ²)
    for t in (order+1):n_samples
        # AR equation: x[t] = θ₁x[t-1] + θ₂x[t-2] + ... + θₚx[t-p] + ε[t]
        x[t] = sum(θ[i] * x[t-i] for i in 1:order) + randn() * sqrt(σ²)
    end
    return x
end
# Set random seed for reproducibility
Random.seed!(123)

# Define orders for each process
orders = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40]
# Overridden with equal orders due to the initialization limitation mentioned above
orders = 4 .* ones(Int, 20)

n_samples = 100
n_ar_processes = length(orders)
processes = []

# Generate AR parameters and data for each process
for (i, order) in enumerate(orders)
    # Generate stable AR parameters (using a simple method)
    θ = 0.5 .^ (1:order) # This ensures stability by having decreasing coefficients
    # Generate the AR process
    x = generate_ar_process(order, θ, n_samples)
    push!(processes, x)
end

# Convert to the format needed for the model
true_data = [[processes[j][i] for j in 1:n_ar_processes] for i in 1:n_samples]
observations = Any[[true_data[i][j] .+ randn()*0.1 for j in 1:n_ar_processes] for i in 1:n_samples]

# Extend observations with missing values
n_missing = 20
for i in (n_samples+1):(n_samples+n_missing)
    push!(observations, missing)
end
function form_priors(orders)
    priors = (x = [], γ = [], θ = [])
    for k in 1:length(orders)
        push!(priors[:γ], GammaShapeRate(1.0, 1.0))
        push!(priors[:x], MvNormalMeanPrecision(zeros(orders[k]), diageye(orders[k])))
        push!(priors[:θ], MvNormalMeanPrecision(zeros(orders[k]), diageye(orders[k])))
    end
    return priors
end

function form_c_b(y, orders)
    c = Any[]
    b = Any[]
    for k in 1:length(orders)
        _c = ReactiveMP.ar_unit(Multivariate, orders[k])
        _b = zeros(length(y[1])); _b[k] = 1.0
        push!(c, _c)
        push!(b, _b)
    end
    return c, b
end
@model function AR_sequence(x, index, length, priors, order)
    γ ~ priors[:γ][index]
    θ ~ priors[:θ][index]
    x_prev ~ priors[:x][index]
    for i in 1:length
        x[index, i] ~ AR(x_prev, θ, γ) where {
            meta = ARMeta(Multivariate, order, ARsafe())
        }
        x_prev = x[index, i]
    end
end

@model function dot_sequence(out, k, i, orders, x, c, b)
    if k === length(orders)
        out ~ b[k] * dot(c[k], x[k, i])
    else
        next ~ dot_sequence(k = k + 1, i = i, orders = orders, x = x, c = c, b = b)
        out ~ b[k] * dot(c[k], x[k, i]) + next
    end
end
@model function VAR(y, orders)
    priors = form_priors(orders)
    c, b = form_c_b(y, orders)
    y_length = length(y)
    local x # `x` is initialized in the loop within the submodels
    for k in 1:length(orders)
        x ~ AR_sequence(index = k, length = y_length, priors = priors, order = orders[k])
    end
    for i in 1:y_length
        μ[i] ~ dot_sequence(k = 1, i = i, orders = orders, x = x, c = c, b = b)
        y[i] ~ MvNormal(μ = μ[i], Λ = 10 * diageye(length(orders)))
    end
end

@constraints function var_constraints()
    for q in AR_sequence
        # This requires a patch in GraphPPL though
        q(x, x_prev, γ, θ) = q(x, x_prev)q(γ)q(θ)
    end
end

@initialization function var_init(orders)
    # This is a problem still
    for init in AR_sequence
        q(γ) = GammaShapeRate(1.0, 1.0)
        q(θ) = MvNormalMeanPrecision(zeros(orders[1]), diageye(orders[1]))
    end
    μ(x) = MvNormalMeanPrecision(zeros(orders[1]), diageye(orders[1]))
end
mresult = infer(
    model = VAR(orders = orders),
    data = (y = observations, ),
    constraints = var_constraints(),
    initialization = var_init(orders),
    returnvars = KeepLast(),
    options = (limit_stack_depth = 100, ),
    showprogress = true,
    iterations = 50,
    free_energy = false,
)
# ===== Plotting =====
# Function to create a single plot for a given component
function plot_component(mresult, true_data, observations, component)
    # Create the correct subscript label (e.g., x₁, x₂, etc.)
    subscript = join([Char(0x2080 + parse(Int, d)) for d in string(component)])
    plot(first.(mean.(mresult.posteriors[:x][component, :])),
         ribbon = first.(var.(mresult.posteriors[:x][component, :])),
         label = "x$subscript")
    plot!(getindex.(true_data, component), label = "True data")
    scatter!(getindex.(observations[1:n_samples], component), label = "Observations")
end

plot_component(mresult, true_data, observations, 15)
```
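A small extension sketch, in case tiling several components at once is useful (assumes the same variables as in the script above):

```julia
# Tile posterior plots for the first four components in a 2×2 grid
component_plots = [plot_component(mresult, true_data, observations, k) for k in 1:4]
plot(component_plots..., layout = (2, 2), size = (900, 600))
```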
---
Hey folks!
I've got something cool to share (and a cry for help 😅). I've been working on a multivariate latent autoregressive model with a neat twist: instead of creating a MAR node that is hidden in someone's fork, we can cheat a little and stack multiple latent AR nodes together, allowing different AR orders for each process (you can't do that in regular VAR/MAR implementations). The good news: it works!
This lets us model each latent process with its own AR order.
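In equations, the twist looks roughly like this (a sketch inferred from the model snippets in the replies above; $b_k$ and $c_k$ are the selector vectors defined there): each observed vector mixes the first components of independent latent AR processes of possibly different orders $p_k$,

$$x^{(k)}_t \sim \mathrm{AR}(p_k), \qquad y_t \sim \mathcal{N}\Big(\sum_{k} b_k \, c_k^\top x^{(k)}_t,\ \Lambda^{-1}\Big)$$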
The bad news, as you'll see in the code: the model definition... well, let's just say it's not going to win any beauty contests 😬. Here's a snippet of what I'm dealing with (quoted and reworked in the first reply above).
Before you ask: no, I didn't write those AR processes by hand. I got Cursor to do the dirty work for me.
**The Ask**
If anyone has ideas on how to make this more elegant (read: less painful to look at), that'd be great. Specifically, I'm looking for ways to clean up the model specification.
Let's turn this monster into something beautiful together! 🚀
Full code available if anyone's brave enough to look at it 😅