diff --git a/src/rules.jl b/src/rules.jl
index e6b2fa1..7af0685 100644
--- a/src/rules.jl
+++ b/src/rules.jl
@@ -49,7 +49,24 @@ end
 backward(method::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, q::Leaf; kargs...) = backward(method, k, q[]; kargs...)
 backward(method::BFFG, k, q::Leaf; kargs...) = backward(method, k, q[]; kargs...)
 
+"""
+    backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, q::WGaussian{(:F,:Γ,:c)}; unfused=false)
+
+For a Markov kernel `k` of the form `x ↦ N(Bx + β, Q)` this function computes
+`x ↦ k q = ∫ q(y) pdf(N(Bx + β, Q), y) dy` in the form `q0(y) = exp(c0)⋅pdf(N(Γ0 \\ F0, inv(Γ0)), y)`.
+Requires invertibility of `Γ`.
+
+If `unfused=true`, avoid a call to `logdet(B)`, allowing singular or rectangular `B`
+to be handled at a computational cost.
+
+Returns a message object for forward guiding, and `q0`.
+
+Arguments:
+
+* `k`: a kernel such that `Y ~ N(B*x + β, Q)`
+* `q::WGaussian{(:F,:Γ,:c)}`: the (unnormalized) density `q(y) = exp(c)⋅pdf(N(Γ \\ F, inv(Γ)), y)`
+"""
 function backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, q::WGaussian{(:F,:Γ,:c)};
         unfused=false)
     @unpack F, Γ, c = q
     # Theorem 7.1 [Automatic BFFG]
@@ -69,6 +86,20 @@ function backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, q
     message(q0, q), q0
 end
 
+"""
+    backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, y; unfused=false)
+
+For a Markov kernel `k` of the form `x ↦ N(Bx + β, Q)` this function computes the function
+`x ↦ pdf(N(Bx + β, Q), y)` and returns it as a `WGaussian` in the form `exp(c0)⋅pdf(N(Γ0 \\ F0, inv(Γ0)), y)`,
+wrapped in a `Leaf` object.
+
+If `unfused=true`, avoid a call to `logdet(B)`. This method is intended to be called with observations,
+rather than the method taking a `WGaussian` argument.
+
+Arguments:
+
+* `k`: a kernel such that `Y ~ N(B*x + β, Q)`
+"""
 function backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, y; unfused=false)
     # Theorem 7.1 [Automatic BFFG]
     B, β, Q = params(k)
@@ -182,6 +213,7 @@ function backward(::BFFG, ::Copy, args::Union{Leaf{<:WGaussian{(:μ,:Σ,:c)}},WG
 end
 
 
+
 function backward(::Union{BFFG,BF}, ::Copy, a::Gaussian{(:F,:Γ)}, args...)
     F, H = params(a)
     for b in args
@@ -192,6 +224,16 @@ function backward(::Union{BFFG,BF}, ::Copy, a::Gaussian{(:F,:Γ)}, args...)
     message(), Gaussian{(:F,:Γ)}(F, H)
 end
 
+"""
+    backward(::BFFG, ::Copy, a::Union{Leaf{<:WGaussian{(:F,:Γ,:c)}}, WGaussian{(:F,:Γ,:c)}}, args...)
+
+For a Markov kernel `k::Copy` that represents the deterministic function `x ↦ Dirac((x, ..., x))`
+this function computes the corresponding pullback `k(h1, ..., hn) = h1(x)⋅...⋅hn(x)` and returns it as a `WGaussian` in the form `exp(c0)⋅pdf(N(Γ0 \\ F0, inv(Γ0)), y)`.
+From a Bayesian perspective, this performs *fusion* of the information about a value `x` given in the form
+of a tuple of unnormalized densities. If an argument is wrapped in a `Leaf` object, it is unwrapped and handled accordingly.
+
+This corresponds to adding all adjoints stemming from different uses of a variable in automatic differentiation.
+"""
 function backward(::BFFG, ::Copy, a::Union{Leaf{<:WGaussian{(:F,:Γ,:c)}}, WGaussian{(:F,:Γ,:c)}}, args...; unfused=true)
     unfused = false
     F, H, c = params(convert(WGaussian{(:F,:Γ,:c)}, a))
diff --git a/src/wgaussian.jl b/src/wgaussian.jl
index 9e85c17..13c3bd3 100644
--- a/src/wgaussian.jl
+++ b/src/wgaussian.jl
@@ -1,5 +1,10 @@
 import Statistics: mean, cov
 import Random.rand
+"""
+    WGaussian(;μ, Σ, c)
+
+Create a function equal to the density of `N(μ, Σ)` scaled by `exp(c)`.
+"""
 struct WGaussian{P,T} <: AbstractMeasure
     par::NamedTuple{P,T}
 end