# --- Environment setup ---------------------------------------------------------
# Activate the lesson environment and install its pinned dependencies.
using Pkg
Pkg.activate("../../../lessons/")
Pkg.instantiate();

using CSV
using Random
using DataFrames
using LinearAlgebra
using SpecialFunctions
using Distributions
using ReactiveMP
using RxInfer
using Plots
default(label="", linewidth=4, margin=10Plots.pt)
import CairoMakie: tricontourf
# @call_rule evaluates a single message-passing update rule by hand;
# prod(ClosedProd(), p, q) multiplies two distributions in closed form.
import ReactiveMP: @call_rule, prod, ClosedProd

# --- Beta-Bernoulli with a single observation ----------------------------------
@model function beta_bernoulli()
    "Beta-Bernoulli model with single observation"
    # Allocate data variable
    X = datavar(Int64)
    # Prior distribution
    θ ~ Beta(1.0, 1.0)
    # Likelihood of data point
    X ~ Bernoulli(θ)
    return X,θ
end

# Run automated message passing on the single observation X = 1.
results = inference(
    model = beta_bernoulli(),
    data  = (X = 1,),
)

# Reproduce the two messages arriving at θ by hand:
# message1 flows out of the Beta prior node, message2 out of the Bernoulli
# likelihood node given the observation.
# NOTE: the space before the named-tuple argument of @call_rule is required
# by the macro's parsing.
message1 = @call_rule Beta(:out, Marginalisation) (m_a = PointMass(1.0), m_b = PointMass(1.0))
message2 = @call_rule Bernoulli(:p, Marginalisation) (m_out = PointMass(1),)

# Sample space of random variable
θ_range = range(0, step=0.01, stop=1.0)

# Plot messages
plot( θ_range, x -> pdf.(message1, x), color="red", label="Prior-based message", xlabel="θ", ylabel="p(θ)")
plot!(θ_range, x -> pdf.(message2, x), color="blue", label="Likelihood-based message", legend=:topleft, size=(800,400))

# Closed-form product of the two messages — this is the (unnormalized route to
# the) marginal posterior over θ.
prod(ClosedProd(), message1, message2)

# Posterior computed by the automated inference run, for comparison.
posterior = results.posteriors[:θ]

# Overlay both hand-computed messages and the automated marginal posterior.
plot( θ_range, x -> pdf.(message1, x), color="red", label="Prior-based message", xlabel="θ", ylabel="p(θ)")
plot!(θ_range, x -> pdf.(message2, x), color="blue", linewidth=8, linestyle=:dash, alpha=0.5, label="Likelihood-based message", legend=:topleft,size=(800,400))
plot!(θ_range, x -> pdf.(posterior, x), color="purple", label="Marginal posterior")

# --- Beta-Bernoulli with multiple observations ---------------------------------
@model function beta_bernoulli(N)
    "Beta-Bernoulli model with multiple observations"
    # Allocate data variable
    X = datavar(Int64, N)
    # Prior distribution
    θ ~ Beta(3.0, 2.0)
    # Loop over data
    for i in 1:N
        # Likelihood of i-th data points
        X[i] ~ Bernoulli(θ)
    end
    return X,θ
end

X = [1, 0];
N = length(X);

results = inference(
    model = beta_bernoulli(N),
    data = (X = X,),
)

# Hand-computed message passing for the two-observation case:
# one likelihood message per observation, their product (total likelihood),
# then the product with the Beta(3,2) prior message.
message1 = @call_rule Bernoulli(:p, Marginalisation) (m_out = PointMass(X[1]),)
message2 = @call_rule Bernoulli(:p, Marginalisation) (m_out = PointMass(X[2]),)
message3 = prod(ClosedProd(), message1, message2)
message4 = Beta(3.0, 2.0)
posterior = prod(ClosedProd(), message3, message4)
# Automated result, shown for comparison with the hand-computed `posterior`.
results.posteriors[:θ]

# Plot the per-observation messages and their product.
plot( θ_range, x -> pdf.(message1, x), color="red", label="Message for X₁", xlabel="θ", ylabel="p(θ)")
plot!(θ_range, x -> pdf.(message2, x), color="blue", label="Message for X₂", size=(800,400))
plot!(θ_range, x -> pdf.(message3, x), color="purple", label="Total likelihood message")

# Plot prior-based message
plot( θ_range, x -> pdf(message4, x), color="red", linewidth=3, label="Prior", xlabel="θ", ylabel="p(θ)")
# plot likelihood-based message
plot!(θ_range, x -> pdf(message3, x), color="blue", linewidth=3, label="Likelihood", size=(800,400))
# Plot marginal posterior
plot!(θ_range, x -> pdf(posterior, x), color="purple", linewidth=4, linestyle=:dash, label="Posterior")

# --- Dirichlet-Categorical -----------------------------------------------------
# α is the vector of prior concentration parameters; observations are one-hot
# encoded category indicators.
@model function dirichlet_categorical(α; N=1)
    # Preallocate variables
    X = datavar(Vector{Int64}, N)
    # Prior distribution
    θ ~ Dirichlet(α)
    # Likelihood
    for i in 1:N
        X[i] ~ Categorical(θ)
    end
    return X,θ
end

# Three one-hot observations over three categories.
X = [[0, 1, 0], [0, 0, 1], [0, 0, 1]];
N = length(X);

# Per-observation likelihood messages toward θ.
# NOTE(review): these use q_out (variational input) where the Bernoulli rules
# above used m_out — presumably the Categorical rule is only registered in that
# form; confirm against the installed ReactiveMP version.
message1 = @call_rule Categorical(:p, Marginalisation) (q_out = PointMass(X[1]),);
message2 = @call_rule Categorical(:p, Marginalisation) (q_out = PointMass(X[2]),);
message3 = @call_rule Categorical(:p, Marginalisation) (q_out = PointMass(X[3]),);

# Prior concentration parameters
α0 = [1.0, 2.0, 1.0];

results = inference(
    model = dirichlet_categorical(α0, N=N),
    data = (X = X,),
)

# Load pre-generated triangular mesh
# assumes columns 1:2 are 2-D plotting coordinates and columns 3:5 are the
# corresponding points on the 3-simplex — TODO confirm against the CSV.
mesh = Matrix(DataFrame(CSV.File("../datasets/trimesh.csv")))

# Compute probabilities on trimesh of simplex
pvals = [pdf(Dirichlet(α0), mesh[n,3:5]) for n in 1:size(mesh,1)]

# Generate filled contour plot
tricontourf(mesh[:,1], mesh[:,2], pvals)

# Extract parameters
αN = params(results.posteriors[:θ])[1]

# Compute probabilities on trimesh of simplex
pvals = [pdf(Dirichlet(αN), mesh[n,3:5]) for n in 1:size(mesh,1)]
# Generate filled contour plot of the posterior Dirichlet density on the simplex.
tricontourf(mesh[:,1], mesh[:,2], pvals)

# --- Normal-Normal (known observation variance) --------------------------------
# Prior:      θ ~ Normal(m0, v0)
# Likelihood: X[i] ~ Normal(θ, σ²), i = 1..N
@model function normal_normal(m0, v0, σ; N=1)
    # Allocate data variable
    X = datavar(Float64, N)
    # Prior distribution
    θ ~ Normal(mean = m0, variance = v0)
    # Likelihood
    for i = 1:N
        X[i] ~ Normal(mean = θ, variance = σ^2)
    end
    return X,θ
end

# Known observation standard deviation.
σ = 2.0;
X = [52.390036995147426,
     74.49846899398719,
     50.92640384934159,
     39.548361884989717];
N = length(X);
# Prior mean and variance.
m0 = 60;
v0 = 20;

results = inference(
    model = normal_normal(m0, v0, σ, N=N),
    data = (X = X,),
)

posterior = results.posteriors[:θ]
mean_var(posterior)

# Reproduce the total likelihood-based message toward θ by hand: one message
# per observation, folded together with closed-form products.
# FIX: the observation variance passed to the rule must be σ^2 to match the
# model above (it previously used 1.5^2, which is inconsistent with σ = 2.0
# and would make the "Likelihood" curve disagree with the plotted posterior).
message = @call_rule NormalMeanVariance(:μ, Marginalisation) (m_out=PointMass(X[1]), m_v=PointMass(σ^2))
for i in 2:N
    # `global` makes the top-level reassignment explicit and avoids the
    # ambiguous-soft-scope warning in non-interactive execution.
    global message
    message_i = @call_rule NormalMeanVariance(:μ, Marginalisation) (m_out=PointMass(X[i]), m_v=PointMass(σ^2))
    message = prod(ClosedProd(), message, message_i)
end
mean_var(message)

# Range of values to plot pdf for
θ_range = range(50.0, step=0.1, stop=65.0)

# Prior
plot( θ_range, x -> pdf(Normal(m0, sqrt(v0)), x), color="red", label="Prior", xlabel="θ", ylabel="p(θ)")
# Likelihood
plot!(θ_range, x -> pdf(message, x), color="blue", label="Likelihood")
# Posterior
plot!(θ_range, x -> pdf(posterior, x), color="purple", linestyle=:dash, label="Posterior", size=(800,300))