Merge branch 'devel' into fixed_inputs
alansaul committed Mar 31, 2016
2 parents 597eddc + b1ac51c commit 91bebc8
Showing 2 changed files with 22 additions and 14 deletions.
21 changes: 13 additions & 8 deletions GPy/kern/src/standard_periodic.py
@@ -155,11 +155,16 @@ def update_gradients_diag(self, dL_dKdiag, X):
         self.period.gradient = 0
         self.lengthscale.gradient = 0
 
-    # def gradients_X(self, dL_dK, X, X2=None):
-    #     """derivative of the covariance matrix with respect to X."""
-    #
-    #     raise NotImplemented("Periodic kernel: dK_dX not implemented")
-    #
-    # def gradients_X_diag(self, dL_dKdiag, X):
-    #
-    #     raise NotImplemented("Periodic kernel: dKdiag_dX not implemented")
+    def gradients_X(self, dL_dK, X, X2=None):
+        K = self.K(X, X2)
+        if X2 is None:
+            dL_dK = dL_dK+dL_dK.T
+            X2 = X
+        dX = -np.pi*((dL_dK*K)[:,:,None]*np.sin(2*np.pi/self.period*(X[:,None,:] - X2[None,:,:]))/(2.*np.square(self.lengthscale)*self.period)).sum(1)
+        return dX
+
+    def gradients_X_diag(self, dL_dKdiag, X):
+        return np.zeros(X.shape)
+
+    def input_sensitivity(self, summarize=True):
+        return self.variance*np.ones(self.input_dim)/self.lengthscale**2
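
A quick way to sanity-check the new StdPeriodic.gradients_X is a central finite-difference comparison. The sketch below is not part of this commit; it assumes a GPy build that contains this change, and the kernel settings and test arrays are arbitrary.

import numpy as np
import GPy

np.random.seed(0)
X = np.random.randn(5, 1)
X2 = np.random.randn(4, 1)
dL_dK = np.random.randn(5, 4)

k = GPy.kern.StdPeriodic(input_dim=1, variance=0.7, period=2.0, lengthscale=1.3)

# Analytic gradient of L = sum(dL_dK * K(X, X2)) with respect to X
dX = k.gradients_X(dL_dK, X, X2)

# Central finite differences, one input coordinate at a time
eps = 1e-6
dX_num = np.zeros_like(X)
for i in range(X.shape[0]):
    for j in range(X.shape[1]):
        Xp, Xm = X.copy(), X.copy()
        Xp[i, j] += eps
        Xm[i, j] -= eps
        dX_num[i, j] = np.sum(dL_dK * (k.K(Xp, X2) - k.K(Xm, X2))) / (2 * eps)

print(np.allclose(dX, dX_num, atol=1e-5))  # True if the analytic and numeric gradients agree
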
15 changes: 9 additions & 6 deletions GPy/kern/src/stationary.py
@@ -489,18 +489,21 @@ def __init__(self, input_dim, variance=1., lengthscale=None, power=2., ARD=False
         self.link_parameters(self.power)
 
     def K_of_r(self, r):
-        r2 = np.power(r, 2.)
-        return self.variance*np.power(1. + r2/2., -self.power)
+        r2 = np.square(r)
+        # return self.variance*np.power(1. + r2/2., -self.power)
+        return self.variance*np.exp(-self.power*np.log1p(r2/2.))
 
     def dK_dr(self, r):
-        r2 = np.power(r, 2.)
-        return -self.variance*self.power*r*np.power(1. + r2/2., - self.power - 1.)
+        r2 = np.square(r)
+        # return -self.variance*self.power*r*np.power(1. + r2/2., - self.power - 1.)
+        return -self.variance*self.power*r*np.exp(-(self.power+1)*np.log1p(r2/2.))
 
     def update_gradients_full(self, dL_dK, X, X2=None):
         super(RatQuad, self).update_gradients_full(dL_dK, X, X2)
         r = self._scaled_dist(X, X2)
-        r2 = np.power(r, 2.)
-        dK_dpow = -self.variance * np.power(2., self.power) * np.power(r2 + 2., -self.power) * np.log(0.5*(r2+2.))
+        r2 = np.square(r)
+        # dK_dpow = -self.variance * np.power(2., self.power) * np.power(r2 + 2., -self.power) * np.log(0.5*(r2+2.))
+        dK_dpow = -self.variance * np.exp(self.power*(np.log(2.)-np.log1p(r2+1)))*np.log1p(r2/2.)
         grad = np.sum(dL_dK*dK_dpow)
         self.power.gradient = grad

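The RatQuad changes swap np.power(...) expressions for exp/log1p forms. The two are algebraically equivalent (log1p(r2/2) == log(0.5*(r2+2)) and log1p(r2+1) == log(r2+2)), with log1p keeping precision when r2/2 is tiny. A minimal numpy check of the equivalence, not part of this commit and using arbitrary test values:

import numpy as np

np.random.seed(1)
r = np.abs(np.random.randn(1000))
r2 = np.square(r)
variance, power = 0.5, 3.2

# K_of_r: power form vs. exp/log1p form
old_K = variance * np.power(1. + r2/2., -power)
new_K = variance * np.exp(-power * np.log1p(r2/2.))
print(np.allclose(old_K, new_K))

# dK_dr: power form vs. exp/log1p form
old_dK = -variance * power * r * np.power(1. + r2/2., -power - 1.)
new_dK = -variance * power * r * np.exp(-(power + 1) * np.log1p(r2/2.))
print(np.allclose(old_dK, new_dK))

# dK_dpow: uses log1p(r2 + 1) == log(r2 + 2) and log1p(r2/2) == log(0.5*(r2 + 2))
old_dpow = -variance * np.power(2., power) * np.power(r2 + 2., -power) * np.log(0.5*(r2 + 2.))
new_dpow = -variance * np.exp(power * (np.log(2.) - np.log1p(r2 + 1))) * np.log1p(r2/2.)
print(np.allclose(old_dpow, new_dpow))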
