Source code for GPy.likelihoods.weibull

# Copyright (c) 2012 - 2014, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)


import numpy as np
from scipy import stats, special
import scipy as sp
from ..core.parameterization import Param
from ..core.parameterization.transformations import Logexp
from . import link_functions
from .likelihood import Likelihood


class Weibull(Likelihood):
    """
    Implementing Weibull likelihood function ...
    """
    def __init__(self, gp_link=None, beta=1.):
        if gp_link is None:
            # Parameterised not as link_f but as f
            # gp_link = link_functions.Identity()

            # Parameterised as link_f
            gp_link = link_functions.Log()
        super(Weibull, self).__init__(gp_link, name='Weibull')
        self.r = Param('r_weibull_shape', float(beta), Logexp())
        self.link_parameter(self.r)
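
    # The derivative expressions below are consistent with parameterising the
    # Weibull density through lambda = link_f (lambda = exp(f) under the Log link).
    # A sketch of the implied terms, inferred from the code rather than stated in it:
    #
    #   uncensored:     log p(y | lambda) = log(r) + (r - 1)*log(y) - log(lambda) - y**r / lambda
    #   right-censored: log S(y | lambda) = -y**r / lambda
    #
    # Differentiating these w.r.t. lambda reproduces d2logpdf_dlink2 and
    # d3logpdf_dlink3 below; differentiating those results w.r.t. r gives the
    # *_dr methods.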

    def d2logpdf_dlink2(self, link_f, y, Y_metadata=None):
        """
        Hessian at y, given link(f), w.r.t link(f), i.e. second derivative of the logpdf at y
        given link(f_i) and link(f_j) w.r.t link(f_i) and link(f_j).

        The hessian will be 0 unless i == j

        .. math::
            \\frac{d^{2} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{2}\\lambda(f)} = \\frac{1}{\\lambda(f_{i})^{2}} - \\frac{2 y_{i}^{r}}{\\lambda(f_{i})^{3}}

        (uncensored case; for right-censored observations only the second term remains)

        :param link_f: latent variables link(f)
        :type link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata holding an optional Nx1 'censored' indicator (1 for right-censored observations, 0 otherwise)
        :returns: Diagonal of hessian matrix (second derivative of log likelihood evaluated at points f)
        :rtype: Nx1 array

        .. Note::
            Will return diagonal of hessian, since everywhere else it is 0, as the likelihood factorizes over cases
            (the distribution for y_i depends only on link(f_i), not on link(f_{j!=i})).
        """
        # hess = (self.beta - 1.) / (y - link_f)**2
        c = np.zeros_like(y)
        if Y_metadata is not None and 'censored' in Y_metadata.keys():
            c = Y_metadata['censored']

        # uncensored = (1-c)* (-(y ** self.r) * np.exp(-link_f))
        # censored = -c*np.exp(-link_f)*y**self.r
        uncensored = (1-c)*(1/link_f**2 - 2*y**self.r/link_f**3)
        censored = -c*2*y**self.r/link_f**3
        hess = uncensored + censored
        # hess = -(y ** self.r) * np.exp(-link_f)
        return hess

    def d3logpdf_dlink3(self, link_f, y, Y_metadata=None):
        """
        Third order derivative of the log-likelihood function at y given link(f), w.r.t link(f)

        .. math::
            \\frac{d^{3} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{3}\\lambda(f)} = -\\frac{2}{\\lambda(f_{i})^{3}} + \\frac{6 y_{i}^{r}}{\\lambda(f_{i})^{4}}

        (uncensored case; for right-censored observations only the second term remains)

        :param link_f: latent variables link(f)
        :type link_f: Nx1 array
        :param y: data
        :type y: Nx1 array
        :param Y_metadata: Y_metadata holding an optional Nx1 'censored' indicator (1 for right-censored observations, 0 otherwise)
        :returns: third derivative of log likelihood evaluated at points f
        :rtype: Nx1 array
        """
        # d3lik_dlink3 = (1. - self.beta) / (y - link_f)**3
        c = np.zeros_like(y)
        if Y_metadata is not None and 'censored' in Y_metadata.keys():
            c = Y_metadata['censored']

        # uncensored = (1-c)* ((y ** self.r) * np.exp(-link_f))
        # censored = c*np.exp(-link_f)*y**self.r
        uncensored = (1-c)*(-2/link_f**3 + 6*y**self.r/link_f**4)
        censored = c*6*y**self.r/link_f**4
        d3lik_dlink3 = uncensored + censored
        # d3lik_dlink3 = (y ** self.r) * np.exp(-link_f)
        return d3lik_dlink3

    def exact_inference_gradients(self, dL_dKdiag, Y_metadata=None):
        return np.zeros(self.size)

    def d2logpdf_dlink2_dr(self, link_f, y, Y_metadata=None):
        """
        Derivative of the hessian of the log-likelihood w.r.t. the shape parameter r.

        :param link_f: latent variables link(f)
        :param y: data
        :param Y_metadata: Y_metadata holding an optional Nx1 'censored' indicator (1 for right-censored observations, 0 otherwise)
        :return: derivative of the hessian w.r.t. r, evaluated at points f
        """
        c = np.zeros_like(y)
        if Y_metadata is not None and 'censored' in Y_metadata.keys():
            c = Y_metadata['censored']

        # uncensored = (1-c)*( -np.exp(-link_f)* (y ** self.r) * np.log(y))
        # censored = -c*np.exp(-link_f)*(y**self.r)*np.log(y)
        uncensored = (1-c)*-2*y**self.r*np.log(y)/link_f**3
        censored = c*-2*y**self.r*np.log(y)/link_f**3
        d2logpdf_dlink_dr = uncensored + censored
        return d2logpdf_dlink_dr

    def d3logpdf_dlink3_dr(self, link_f, y, Y_metadata=None):
        """
        Derivative of the third derivative of the log-likelihood w.r.t. the shape parameter r.

        :param link_f: latent variables link(f)
        :param y: data
        :param Y_metadata: Y_metadata holding an optional Nx1 'censored' indicator (1 for right-censored observations, 0 otherwise)
        :return: derivative of the third derivative w.r.t. r, evaluated at points f
        """
        c = np.zeros_like(y)
        if Y_metadata is not None and 'censored' in Y_metadata.keys():
            c = Y_metadata['censored']

        # Differentiate d3logpdf_dlink3 w.r.t. r, using the same 1/link_f
        # parameterisation as the other derivatives (and np.log(y), not np.log1p(y)):
        # uncensored = (1-c)* ((y**self.r)*np.exp(-link_f)*np.log1p(y))
        # censored = c*np.exp(-link_f)*(y**self.r)*np.log(y)
        uncensored = (1-c)*6*y**self.r*np.log(y)/link_f**4
        censored = c*6*y**self.r*np.log(y)/link_f**4
        d3logpdf_dlink3_dr = uncensored + censored
        return d3logpdf_dlink3_dr

    def d2logpdf_dlink2_dtheta(self, f, y, Y_metadata=None):
        """
        Derivative of the hessian of the log-likelihood w.r.t. the likelihood parameters (here only r).

        :param f: latent variables link(f)
        :param y: data
        :param Y_metadata: Y_metadata holding an optional Nx1 'censored' indicator (1 for right-censored observations, 0 otherwise)
        :return: (num_params x N x D) array of derivatives
        """
        d2logpdf_dlink_dtheta2 = np.zeros((self.size, f.shape[0], f.shape[1]))
        d2logpdf_dlink_dtheta2[0, :, :] = self.d2logpdf_dlink2_dr(f, y, Y_metadata)
        return d2logpdf_dlink_dtheta2

    def update_gradients(self, grads):
        """
        Pull out the gradients, be careful as the order must match the order
        in which the parameters are added
        """
        self.r.gradient = grads[0]

    def samples(self, gp, Y_metadata=None):
        """
        Returns a set of samples of observations conditioned on a given value of the latent variable f.

        :param gp: latent variable
        """
        orig_shape = gp.shape
        gp = gp.flatten()
        # weibull_samples = np.array([sp.stats.weibull_min.rvs(self.r, loc=0, scale=self.gp_link.transf(f)) for f in gp])
        # Scale is lambda**(1/r) so that the samples follow the survival function
        # exp(-y**r / lambda) used by the censored terms above.
        weibull_samples = np.array([sp.stats.weibull_min.rvs(self.r, loc=0, scale=self.gp_link.transf(f)**(1./self.r)) for f in gp])
        return weibull_samples.reshape(orig_shape)
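
A minimal usage sketch (not part of the module): it assumes the class is exported as GPy.likelihoods.Weibull and uses GPy's generic GP model with the Laplace approximation for non-Gaussian likelihoods; the data, kernel choice, and variable names below are illustrative.

import numpy as np
import GPy

X = np.random.rand(50, 1)                         # inputs
Y = np.random.weibull(1.5, size=(50, 1))          # positive observations
censored = np.zeros_like(Y)                       # set entries to 1 to mark right-censored points

kernel = GPy.kern.RBF(1)
likelihood = GPy.likelihoods.Weibull()
inference = GPy.inference.latent_function_inference.Laplace()

m = GPy.core.GP(X, Y, kernel=kernel, likelihood=likelihood,
                inference_method=inference, Y_metadata={'censored': censored})
m.optimize()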