# Expectation E[g(X)] for a continuous univariate distribution, computed by
# numerical quadrature of g(x) * pdf(distr, x) over the support of `distr`.
function expectation(g, distr::ContinuousUnivariateDistribution; kwargs...)
    return first(quadgk(x -> pdf(distr, x) * g(x), extrema(distr)...; kwargs...))
end
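
## Usage sketch: assuming Distributions and QuadGK are loaded, the mean of a
## Normal is recovered numerically, e.g.
# expectation(identity, Normal(2.0, 1.0))  # ≈ 2.0 (up to quadrature tolerance)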

## Assuming that discrete distributions only take integer values.
function expectation(g, distr::DiscreteUnivariateDistribution; epsilon::Real=1e-10)
    mindist, maxdist = extrema(distr)
    # We want to avoid summing all the way to infinity, so unbounded supports
    # are truncated at the `epsilon` and `1 - epsilon` quantiles.
    minval = isfinite(mindist) ? mindist : quantile(distr, epsilon)
    maxval = isfinite(maxdist) ? maxdist : quantile(distr, 1 - epsilon)
    return sum(x -> pdf(distr, x) * g(x), minval:maxval)
end
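
## Usage sketch: assuming Distributions is loaded, the mean of a Poisson is
## recovered by summing over the truncated integer support, e.g.
# expectation(identity, Poisson(3.0))  # ≈ 3.0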

function expectation(g, distr::MultivariateDistribution; nsamples::Int=100, rng::AbstractRNG=default_rng())
    nsamples > 0 || throw(ArgumentError("number of samples should be > 0"))
    # We use a function barrier to work around type instability of `sampler(dist)`
    return mcexpectation(rng, g, sampler(distr), nsamples)
end

# Simple Monte Carlo estimate: the average of `f` over `n` independent draws from `sampler`.
mcexpectation(rng, f, sampler, n) = sum(f, rand(rng, sampler) for _ in 1:n) / n
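
## Usage sketch: assuming Distributions and Random are loaded, a Monte Carlo
## estimate of E[X₁² + X₂²] for a standard bivariate normal. The estimate is
## stochastic and only approaches 2 for large `nsamples`, e.g.
# using Random: MersenneTwister
# expectation(x -> sum(abs2, x), MvNormal(zeros(2), [1.0 0.0; 0.0 1.0]);
#             nsamples=10_000, rng=MersenneTwister(1))  # ≈ 2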

## Leave undefined until we've implemented a numerical integration procedure
# function entropy(distr::UnivariateDistribution)
#     pf = typeof(distr) <: ContinuousDistribution ? pdf : pmf
#     f = x -> pf(distr, x)
#     expectation(distr, x -> -log(f(x)))
# end

# Kullback-Leibler divergence KL(p || q) = E_p[log p(X) - log q(X)], evaluated with
# the `expectation` methods above. Multiplying by `(logp > -Inf)` zeroes out points
# where `p` has no mass (`false` is a strong zero), avoiding NaN/Inf contributions.
function kldivergence(p::Distribution{V}, q::Distribution{V}; kwargs...) where {V<:VariateForm}
    return expectation(p; kwargs...) do x
        logp = logpdf(p, x)
        return (logp > oftype(logp, -Inf)) * (logp - logpdf(q, x))
    end
end
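
## Usage sketch: assuming Distributions and QuadGK are loaded; for unit-variance
## normals the closed form is KL(N(μ₁, 1) || N(μ₂, 1)) = (μ₁ - μ₂)² / 2, so
# kldivergence(Normal(0.0, 1.0), Normal(1.0, 1.0))  # ≈ 0.5
## Note that more specific analytic methods may take precedence for particular
## distribution pairs; the value is the same either way.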