Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# NOTE: the CG optimizer below may warn "Desired error not necessarily
# achieved due to precision loss." when its line search stalls.
### Import Stuff
from __future__ import division

import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as op
from scipy.stats import multivariate_normal
def calc_K(x, l, k=1e-3):
    """Build a squared-exponential covariance matrix over the inputs.

    Parameters
    ----------
    x : 1-D array of input locations
    l : kernel parameter multiplying the squared distance
        (acts as an inverse squared length scale — TODO confirm intent)
    k : small fixed noise/jitter weight mixed onto the diagonal so the
        matrix stays well conditioned and inferred trajectories are smooth

    Returns
    -------
    (cov, sq_dist) : the covariance matrix and the matrix of pairwise
    squared distances between the entries of ``x``.
    """
    xi, xj = np.meshgrid(x, x)
    sq_dist = (xi - xj) ** 2
    cov = (1.0 - k) * np.exp(-0.5 * l * sq_dist)
    cov = cov + k * np.eye(len(x))
    return cov, sq_dist
### generate fake data: draw one sample path from the GP prior
t = np.linspace(0, 10, num=67)
length_scale = 1
K, _ = calc_K(x=t, l=length_scale)
# BUG FIX: multivariate_normal was used without ever being imported
# (it lives in scipy.stats; the import is added at the top of the file).
mvn = multivariate_normal(mean=[0] * len(t), cov=K)
y = mvn.rvs()  # one realization of the GP
# y += np.random.normal(0, scale=.2, size=len(t))  # optionally add observation noise
plt.plot(y, '-o')
# functions for the log-likelihood and its gradient
def logPost(tav_0, x, y):
    """Negative log marginal likelihood of the GP, as a function of
    tav_0 = log(l), the log of the kernel parameter fed to calc_K.

    Parameters
    ----------
    tav_0 : scalar, log of the kernel parameter (optimized in log space
            so the parameter itself stays positive)
    x : 1-D array of inputs
    y : observed function values at x

    Returns the scalar negative log likelihood, suitable as ``fun`` for
    scipy.optimize.minimize.
    """
    tav = np.exp(tav_0)
    K, _ = calc_K(x, l=tav)
    s, logdet = np.linalg.slogdet(K)
    t1 = s * logdet
    # Solve K a = y rather than forming the explicit inverse:
    # cheaper and numerically better conditioned than np.linalg.inv.
    t2 = y.dot(np.linalg.solve(K, y))
    # NOTE: the constant len(y)*log(2*pi) term is deliberately dropped
    # (the original multiplied it by 0); it does not affect the optimum.
    ll = -0.5 * (t1 + t2)
    return -ll
def logPost_grad(tav_0, x, y):
    """Gradient of logPost (the NEGATIVE log likelihood) w.r.t. tav_0.

    BUG FIX vs the original: (1) dK was sqDist*K, missing the -0.5
    factor from differentiating exp(-0.5*sqDist*l); (2) the data-fit
    term entered with the wrong sign; (3) the chain-rule factor
    d(tav)/d(tav_0) = tav for the log-space parameterization was
    missing. With those errors the value could not serve as a Jacobian
    for minimizing logPost.
    """
    tav = np.exp(tav_0)
    K, sqDist = calc_K(x, l=tav)
    Kinv = np.linalg.inv(K)
    # dK/dl, elementwise: d/dl (1-k)*exp(-0.5*sqDist*l) = -0.5*sqDist*K
    # (the diagonal jitter term drops out because sqDist has a zero diagonal).
    dK = -0.5 * sqDist * K
    t1 = np.trace(Kinv.dot(dK))          # d(log det K)/dl
    alpha = Kinv.dot(y)
    t2 = alpha.dot(dK).dot(alpha)        # -d(y^T K^-1 y)/dl
    # d(-ll)/dl = 0.5*(t1 - t2); chain rule: multiply by dl/dtav_0 = tav.
    return 0.5 * (t1 - t2) * tav
### Inference pretending we have perfectly inferred y
initp = 5  # initial guess for tav_0 = log(kernel parameter)
r = op.minimize(fun=logPost,
                x0=initp,
                args=(t, y),
                jac=False,  # was jac=0; explicit bool -> finite-difference gradient
                method='CG',
                options={'disp': False, 'gtol': 1e-4})
# BUG FIX: 'print r' is Python-2-only syntax; the call form works on 2 and 3.
print(r)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement