Skip to content

Commit

Permalink
Merge pull request #26 from mschauer/docstrings
Browse files Browse the repository at this point in the history
Adding Docstrings
  • Loading branch information
mschauer authored Jun 26, 2024
2 parents 36cf959 + ee620e4 commit 6f7c3b5
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 0 deletions.
42 changes: 42 additions & 0 deletions src/rules.jl
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,24 @@ end
# `Leaf`-wrapped message with an (affine/linear) Gaussian kernel:
# strip the `Leaf` wrapper and delegate to the rule for the payload.
function backward(method::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, q::Leaf; kargs...)
    return backward(method, k, q[]; kargs...)
end
# Generic fallback for any kernel `k`: a `Leaf`-wrapped message is
# unwrapped (`q[]`) and the call recurses on the inner message.
function backward(method::BFFG, k, q::Leaf; kargs...)
    return backward(method, k, q[]; kargs...)
end

"""
    backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, q::WGaussian{(:F,:Γ,:c)}; unfused=false)
For a Markov kernel `k` of the form `x ↦ N(Bx + β, Q)` this function computes
`x ↦ k q = ∫ q(y) pdf(N(Bx + β, Q), y) dy` in the form `q0(y) = exp(c0)⋅pdf(N(Γ0 \\ F0, inv(Γ0)), y)`.
Requires invertibility of `Γ`.
If `unfused=true`, avoids a call to `logdet(B)`, allowing singular or rectangular `B`
to be handled at some computational cost.
Returns a message object for forward guiding and `q0`.
Arguments:
* `k` a kernel such that `Y ~ N(B*x + β, Q)`
* `q::WGaussian{(:F,:Γ,:c)}` is the (unnormalized) density `q(y) = exp(c)⋅pdf(N(Γ \\ F, inv(Γ)), y)`
"""
function backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, q::WGaussian{(:F,:Γ,:c)}; unfused=false)
@unpack F, Γ, c = q
# Theorem 7.1 [Automatic BFFG]
Expand All @@ -69,6 +86,20 @@ function backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, q
message(q0, q), q0
end

"""
backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, y; unfused=false)
For a Markov kernel `k` of the form `x ↦ N(Bx + β, Q)` this function computes the function
`x ↦ pdf(N(Bx + β, Q), y)` and returns it as a `WGaussian` in the form `exp(c0)⋅pdf(N(Γ0 \\ F0, inv(Γ0)), y)`
wrapped in a `Leaf` object.
If `unfused=true`, avoids a call to `logdet(B)`. This method is meant to be called with
observations `y`, rather than the method taking a `WGaussian` argument.
Arguments:
* `k` a kernel such that `Y ~ N(B*x + β, Q)`
"""
function backward(::BFFG, k::Union{AffineGaussianKernel,LinearGaussianKernel}, y; unfused=false)
# Theorem 7.1 [Automatic BFFG]
B, β, Q = params(k)
Expand Down Expand Up @@ -182,6 +213,7 @@ function backward(::BFFG, ::Copy, args::Union{Leaf{<:WGaussian{(:μ,:Σ,:c)}},WG
end



function backward(::Union{BFFG,BF}, ::Copy, a::Gaussian{(:F,:Γ)}, args...)
F, H = params(a)
for b in args
Expand All @@ -192,6 +224,16 @@ function backward(::Union{BFFG,BF}, ::Copy, a::Gaussian{(:F,:Γ)}, args...)
message(), Gaussian{(:F,:Γ)}(F, H)
end

"""
backward(::BFFG, ::Copy, a::Union{Leaf{<:WGaussian{(:F,:Γ,:c)}}, WGaussian{(:F,:Γ,:c)}}, args...)
For a Markov kernel `k::Copy` that represents the deterministic function `x ↦ Dirac((x, ..., x))`
this function computes the corresponding pullback `k(h1, ..., hn) = h1(x)⋅...⋅hn(x)` and returns it as a `WGaussian` in the form `exp(c0)⋅pdf(N(Γ0 \\ F0, inv(Γ0)), y)`.
From a Bayesian perspective, this performs *fusion* of the information about a value `x` given in form
of a tuple of unnormalized densities. If an argument is wrapped in a `Leaf` object, it is unwrapped first.
This corresponds to adding all adjoints stemming from different uses of a variable in automatic differentiation.
"""
function backward(::BFFG, ::Copy, a::Union{Leaf{<:WGaussian{(:F,:Γ,:c)}}, WGaussian{(:F,:Γ,:c)}}, args...; unfused=true)
unfused = false
F, H, c = params(convert(WGaussian{(:F,:Γ,:c)}, a))
Expand Down
5 changes: 5 additions & 0 deletions src/wgaussian.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
import Statistics: mean, cov
import Random.rand
"""
WGaussian(;μ, Σ, c)
Creates a function equal to the density of `N(μ, Σ)` scaled by `exp(c)``.
"""
struct WGaussian{P,T} <: AbstractMeasure
par::NamedTuple{P,T}
end
Expand Down

0 comments on commit 6f7c3b5

Please sign in to comment.