from __future__ import print_function
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
from scipy import constants
import seaborn as sns
sns.set_style("darkgrid")
import scipy
# Ground truth: a sine curve y = sin(alpha * x), sampled at 8 locations.
alpha = 1.4
f = lambda x: np.sin(alpha * x)
# Coarse grid = "measurement" locations; fine grid = smooth plotting curve.
x = np.linspace(0, np.pi, 8)
x_fine = np.linspace(-.5, np.pi + .5, 1000)
y = f(x)  # was np.sin(1.4*x) — use f so the frequency constant lives in one place
# Plot the sampled points on top of the underlying curve.
# FIX: `sns.plt` was removed from seaborn (0.9+); call matplotlib directly.
plt.plot(x, y, 'o', zorder=2)
plt.plot(x_fine, f(x_fine), zorder=1)
plt.ylim(-1.1, 1.1)
plt.xlim(min(x_fine), x_fine.max());
plt.title('original curve with points');
For each dot shown above we generate an ensemble of possible measurements. In this first example they are distributed according to a simple Gaussian.
# Noise model: each true value receives 15 Gaussian-perturbed replicate readings.
sigma = .01
measurements = np.array(
    [np.random.normal(center, scale=sigma, size=15) for center in y])
# Overlay the noisy replicates on the noiseless curve for comparison.
plt.plot(x, measurements, 'o', c='indianred')
plt.plot(x_fine, f(x_fine))
plt.title('Measurements');
plt.xlim(x_fine.min(), x_fine.max());
import pymc as pm
There are several unknowns in this example now: first the variable $\alpha$ from the original sine curve, and then $\tau$, the precision (inverse variance) of the measurement error.
# Unknowns: alpha (sine frequency) and tau (noise precision — in pymc2,
# Normal(mu, tau) is precision-parameterised, sd = tau**-0.5).
alpha = pm.Normal('alpha', 3, 1)
# NOTE(review): a Normal prior puts mass on tau <= 0, which is invalid for a
# precision; a Gamma or Exponential prior would be safer — confirm intent.
tau = pm.Normal('tau', 5, 10)
# One deterministic node per measurement location: sin(alpha * x_i).
# BUG FIX: the original lambda captured the comprehension variable `xx` by
# reference (late binding), so every node evaluated sin(alpha * x[-1]).
# Binding `xx` as a default argument freezes each location's own value.
obs = [pm.CommonDeterministics.Lambda('obs_{}'.format(i),
                                      lambda alpha=alpha, xx=xx: np.sin(alpha * xx))
       for i, xx in enumerate(x)]
# Observed noisy replicates around each deterministic mean, shared precision tau.
noises = [pm.Normal('noise_{}'.format(i), o, tau,
                    value=m, observed=True) for i, (m, o) in
          enumerate(zip(measurements, obs))]
var_list = [tau, alpha]
var_list.extend(obs)
var_list.extend(noises)
model = pm.Model(var_list)
# Start the chain at the MAP estimate, then draw 100k samples,
# discarding the first 90k as burn-in.
map_ = pm.MAP(model)
map_.fit()
mcmc = pm.MCMC(model)
mcmc.sample(100000, 90000)
# Notebook progress output captured from mcmc.sample():
[-----------------100%-----------------] 100000 of 100000 complete in 15.0 sec
# Trace, autocorrelation, and histogram diagnostics for each sampled variable.
pm.Matplot.plot(mcmc.trace("alpha"))
Plotting alpha
pm.Matplot.plot(mcmc.trace("tau"))
Plotting tau
from scipy.stats.mstats import mquantiles
# Posterior summary for alpha: mean plus a central 95% credible interval.
alpha_ = mcmc.trace('alpha')[:]
alpha_mean = np.mean(alpha_)
alpha_qs = mquantiles(alpha_, [.025, .975])
# Shade the band between the curves implied by the 2.5% and 97.5% quantiles,
# overlay the true input curve and the raw measurements.
lower_curve = np.sin(alpha_qs[0] * x_fine)
upper_curve = np.sin(alpha_qs[1] * x_fine)
plt.fill_between(x_fine, lower_curve, y2=upper_curve, color='#7A68A6')
plt.plot(x_fine, lower_curve, color='#7A68A6', label='95% CI')
plt.plot(x_fine, np.sin(1.4 * x_fine), label='input', zorder=1, c='indianred')
plt.plot(x, measurements, 'o', zorder=-1, color='blue')
plt.ylim(-1.4, 1.4)
plt.legend(loc='lower right')
<matplotlib.legend.Legend at 0x7fa8d9b41090>
# Posterior noise scale. pymc2's Normal(mu, tau) uses precision tau, so the
# standard deviation is tau**-0.5; the original `1 / tau` is the *variance*,
# which was then incorrectly fed to norm.pdf as its `scale` argument below.
# NOTE(review): assumes the tau trace is strictly positive — TODO confirm,
# since its prior is a Normal and could admit negative draws.
betas = 1 / np.sqrt(mcmc.trace('tau')[:])
betas_mean = np.mean(betas)
betas_qs = mquantiles(betas, [.025, .975])
# Widest and narrowest posterior error distributions vs. the true input noise.
def plot_error_distri(idx):
    """Plot candidate Gaussian error distributions for measurement site `idx`.

    Overlays normal pdfs centred on the true mean sin(1.4 * x[idx]) using the
    max / min / mean posterior noise scale from `betas`, the true input scale
    `sigma`, and the raw measurements as dots near the x-axis.
    """
    # Hard-coded 1.4: the global `alpha` was rebound to a pymc stochastic
    # above, so the true frequency must be spelled out here.
    mu = np.sin(1.4 * x[idx])
    m = measurements[idx]
    xx = np.linspace(mu - 1, mu + 1, 100)
    plt.plot(xx, scipy.stats.norm.pdf(xx, mu, betas.max()), label='max')
    plt.plot(xx, scipy.stats.norm.pdf(xx, mu, betas.min()), label='min')
    plt.plot(xx, scipy.stats.norm.pdf(xx, mu, betas.mean()), label='mean')
    # BUG FIX: the 'Input' curve previously used a hard-coded 0.2, but the
    # measurements were generated above with scale `sigma` (= .01).
    plt.plot(xx, scipy.stats.norm.pdf(xx, mu, sigma), label='Input')
    plt.plot(m, np.ones(len(m)) * .1, 'o')
    plt.xlim(mu - 1, mu + 1)
    plt.legend();
    plt.title('idx = {}'.format(idx))
# Lay the per-measurement comparison panels out on a 4x2 grid.
plt.figure(figsize=(10, 10))
for panel, _ in enumerate(measurements, start=1):
    plt.subplot(4, 2, panel)
    plot_error_distri(panel - 1)
plt.tight_layout();