# Particle filter (sequential Monte Carlo) state estimation for a noisy Rossler time series.
import numpy as np
# from itertools import accumulate


def resample(w, sysnoise=True):
    # Systematic resampling: returns a list of particle indices,
    # index i appearing roughly w[i] * len(w) / sum(w) times.
    if sysnoise:
        r = np.random.uniform()   # random offset for systematic resampling
    else:
        r = 0.0                   # deterministic resampling
    n = len(w)
    total = sum(w)
    # cumulative offspring counts; particle i is copied cum[i+1] - cum[i] times
    cum = [0] + [int(sum(w[:i + 1]) * n / total - r) for i in range(n - 1)] + [n]
    # print("cum", cum)
    return [i for i in range(n) for _ in range(cum[i], cum[i + 1])]


def genlikehood(y, sigma):
    # Unnormalized Gaussian observation likelihood p(y_t | x_t)
    return lambda t, x: np.exp(-np.dot(y[t] - x, y[t] - x) / (2 * sigma ** 2))


def ParticleFilter(init, particlesize, dim, datasize, odein, likehood,
                   lag=0, debugout=True, printout=False, sigma=0.0, sysnoise=True):
    totloglikehood = []
    dims = (particlesize, lag + 1, dim)
    x0 = np.array([[init] * (lag + 1)] * particlesize, dtype=float)  # filtered particles (with lag buffer)
    x = np.empty(dims)                                               # predicted particles
    xs = x0[:, 0, :]
    xlag = []; ws = []; idxx = []
    for t in range(datasize):
        # Prediction : p(x_t | x_{t-1})
        for i in range(particlesize):
            x[i, 0, :] = odein(x0[i, 0, :])[-1] + np.random.normal(size=dim, scale=sigma)  # system noise
            x[i, 1:lag + 1, :] = x0[i, 0:lag, :]  # shift the lag buffer
        # Weighting : w_i = p(y_t | x_t^(i))
        w = [likehood(t, x[ii, 0, :]) for ii in range(particlesize)]
        w = [0 if np.isnan(wi) else wi for wi in w]
        wsum = sum(w)
        totloglikehood += [-np.log(wsum / particlesize)]
        # Resampling; if every weight collapsed, keep all particles
        if wsum > 0:
            index = resample([wi / wsum for wi in w], sysnoise)
        else:
            print(wsum, end=' ')
            index = list(range(particlesize))
        # Filtering : p(x_t | y_{1:t})
        if printout:
            print(len(w), index, len(index))
        assert len(index) == particlesize
        for i, ii in enumerate(index):
            x0[i, :, :] = x[ii, :, :]
        if printout:
            print(t, x0[:, 0, :])
        xs = np.dstack([xs, x0[:, 0, :]])
        if debugout:
            xlag.append(x.copy())
            ws.append(w)
            idxx.append(index)
    size = {"data": datasize, "dim": dim}
    return {"loglikehood": totloglikehood, "size": size, "lag": lag,
            "xlag": xlag, "x": xs, "w": ws, "index": idxx}


def rossler(init=[0, 5, 0], num=100, a=0.2, b=0.2, c=5.7, dt=0.05):
    # Euler-type integration of the Rossler system; returns the trajectory
    cc = []
    x, y, z = init[0], init[1], init[2]
    for t in range(num):
        cc.append([x, y, z])
        x = x + dt * (-y - z)
        y = y + dt * (x + a * y)
        z = z + dt * (b + z * (x - c))
    return cc


def _rossler_n(init, term, interval, a=0.2, b=0.2, c=5.7, dt=0.05):
    # Integrate for `term` steps, keeping only every `interval`-th state
    t = [init]
    for i in range(term // interval):
        t.append(rossler(init=t[i], num=interval, a=a, b=b, c=c, dt=dt)[-1])
    return t


def genrossler_n(term, interval, a=0.2, b=0.2, c=5.7, dt=0.05):
    return lambda x: _rossler_n(x, term, interval, a, b, c, dt)


particlesize = 100
lag = 2
gensigma = 0.2   # observation noise used to generate the data
syssigma = 0.1   # system noise
sigma = 0.4      # observation noise assumed by the filter
term = 10000
interval = 100
dt = 0.01


def test(param=(0.1, 0.1, 5.7), filename="normal", draw=True,
         particlesize=particlesize, lag=lag, gensigma=gensigma, syssigma=syssigma,
         sigma=sigma, term=term, interval=interval, dt=dt):
    a, b, c = param
    rossler_n = genrossler_n(term, interval, a, b, c, dt)
    # original time series
    x = rossler_n([0, 5, 0])
    # observed time series
    y = np.array([[i + np.random.normal(0, gensigma) for i in p] for p in x])
    likehood = genlikehood(y, sigma)
    rossler_nn = genrossler_n(interval, interval, a, b, c, dt)  # one filter step = `interval` Euler steps
    pf = ParticleFilter(init=np.random.normal(size=3), particlesize=particlesize,
                        dim=3, datasize=len(y), odein=rossler_nn,
                        likehood=likehood, lag=lag, sigma=syssigma)
    print(pf['x'].shape)
    z = [[pf['x'][i, 0, t + 1] for t in range(pf['x'].shape[2] - 1)]
         for i in range(particlesize)]
    print(y.shape)
#    for i in pf['w']: print(i)
#    for i in pf['index']: print(i)
    if draw:
        import matplotlib.pyplot as plt
        # observed time series
        plt.plot(y[:, 0], c="red")
        plt.plot(y[:, 0], 'ro')
        # inferred time series
        for i in range(particlesize):
            plt.plot(z[i], c="green")
            plt.plot(z[i], 'go')
        plt.show()
#        plt.savefig(filename + '.png')
        plt.plot(pf['loglikehood'])
        plt.show()


np.random.seed(25)
a, b, c = 0.1, 0.1, 4
test((a, b, c), "normal")
#test((0.2, 0.2, 5.7), "chaotic")
test((0.1, 0.1, 9.), "chaotic", particlesize=100, interval=40)
test((0.1, 0.1, 9.), "chaotic", particlesize=100, interval=100)