This file is indexed.

/usr/share/pyshared/mvpa/clfs/blr.py is in python-mvpa 0.4.8-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
#   Copyright (c) 2008 Emanuele Olivetti <emanuele@relativita.com>
#   See COPYING file distributed along with the PyMVPA package for the
#   copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Bayesian Linear Regression (BLR)."""

__docformat__ = 'restructuredtext'


import numpy as N

from mvpa.misc.state import StateVariable
from mvpa.clfs.base import Classifier

if __debug__:
    from mvpa.misc import debug


class BLR(Classifier):
    """Bayesian Linear Regression (BLR).

    Linear model with a Gaussian prior N(0, sigma_p) on the weights and
    additive Gaussian noise of standard deviation sigma_noise on the
    targets.  Training computes the posterior over the weights
    (including an intercept term modeled by an extra all-ones column);
    prediction returns the posterior-mean output and, optionally, the
    predictive variances.
    """

    predicted_variances = StateVariable(enabled=False,
        doc="Variance per each predicted value")

    log_marginal_likelihood = StateVariable(enabled=False,
        doc="Log Marginal Likelihood")


    _clf_internals = [ 'blr', 'regression', 'linear' ]

    def __init__(self, sigma_p=None, sigma_noise=1.0, **kwargs):
        """Initialize a BLR regression analysis.

        :Parameters:
          sigma_p : None, scalar or 2-D array
            covariance of the Gaussian prior on the weights.  None
            (default) requests an identity matrix sized at training
            time; a scalar is expanded to an isotropic covariance; a
            matrix must be sized (features+1) x (features+1) to account
            for the intercept term, otherwise it is replaced by the
            identity.
          sigma_noise : float
            the standard deviation of the gaussian noise.
            (Defaults to 1.0)
        """
        # init base class first
        Classifier.__init__(self, **kwargs)

        # pylint happiness
        self.w = None

        # It does not make sense to calculate a confusion matrix for a
        # BLR:
        self.states.enable('training_confusion', False)

        # set the prior on w: N(0,sigma_p) , specifying the covariance
        # sigma_p on w (normalized to a proper matrix in _train):
        self.sigma_p = sigma_p

        # set noise level:
        self.sigma_noise = sigma_noise

        self.predicted_variances = None
        self.log_marginal_likelihood = None
        self.labels = None


    def __repr__(self):
        """String summary of the object
        """
        return """BLR(w=%s, sigma_p=%s, sigma_noise=%f, enable_states=%s)""" % \
               (self.w, self.sigma_p, self.sigma_noise, str(self.states.enabled))


    def compute_log_marginal_likelihood(self):
        """
        Compute log marginal likelihood using self.train_fv and self.labels.

        Not implemented yet.
        """
        raise NotImplementedError


    def _train(self, data):
        """Train regression using `data` (`Dataset`).

        Stores the posterior covariance of the weights in self.A_inv
        and their posterior mean in self.w.
        """
        # number of model weights: one per feature plus the intercept
        nw = data.samples.shape[1] + 1

        # Normalize the prior covariance to a proper (nw x nw) matrix.
        # NOTE: the previous code raised AttributeError for a scalar
        # sigma_p because it accessed .shape before the scalar-to-matrix
        # conversion -- handle the scalar case explicitly first.
        if self.sigma_p is None:
            # no prior provided: default to identity covariance
            self.sigma_p = N.eye(nw)
        elif N.isscalar(self.sigma_p):
            # scalar prior: expand to isotropic covariance matrix
            self.sigma_p = N.eye(nw) * self.sigma_p
        elif self.sigma_p.shape[1] != nw:
            # wrong dimensions: fall back to identity covariance
            self.sigma_p = N.eye(nw)

        # add one fake column of '1.0' to model the intercept:
        self.samples_train = N.hstack(
            [data.samples, N.ones((data.samples.shape[0], 1))])

        # posterior covariance of the weights:
        # A = X^T X / sigma_noise^2 + sigma_p^-1 ; keep A^-1 around for
        # predictive variances in _predict
        self.A_inv = N.linalg.inv(1.0/(self.sigma_noise**2) *
                                  N.dot(self.samples_train.T,
                                        self.samples_train) +
                                  N.linalg.inv(self.sigma_p))
        # posterior mean of the weights:
        self.w = 1.0/(self.sigma_noise**2) * N.dot(self.A_inv,
                                                   N.dot(self.samples_train.T,
                                                         data.labels))


    def _predict(self, data):
        """
        Predict the output for the provided data.

        Appends the intercept column and returns the posterior-mean
        predictions; predictive variances are computed only when the
        corresponding state variable is enabled.
        """
        data = N.hstack([data, N.ones((data.shape[0], 1), dtype=data.dtype)])
        predictions = N.dot(data, self.w)

        if self.states.isEnabled('predicted_variances'):
            # do computation only if state variable was enabled
            self.predicted_variances = \
                N.dot(data, N.dot(self.A_inv, data.T)).diagonal()[:, N.newaxis]

        return predictions


    def set_hyperparameters(self, *args):
        """
        Set hyperparameters' values.

        Note that this is a list so the order of the values is
        important: first sigma_noise, then (optionally) the values
        making up sigma_p.
        """
        args = args[0]
        self.sigma_noise = args[0]
        if len(args) > 1:
            self.sigma_p = N.array(args[1:]) # XXX check if this is ok
        return


if __name__ == "__main__":
    import pylab
    pylab.close("all")
    pylab.ion()

    from mvpa.misc.data_generators import linear_awgn

    train_size = 10
    test_size = 100
    F = 1 # dimensions of the dataset

    # N.random.seed(1)

    slope = N.random.rand(F)
    intercept = N.random.rand(1)
    print "True slope:",slope
    print "True intercept:",intercept
    
    dataset_train = linear_awgn(train_size, intercept=intercept, slope=slope)
    # print dataset.labels

    dataset_test = linear_awgn(test_size, intercept=intercept, slope=slope, flat=True)

    regression = True
    logml = False

    b = BLR(sigma_p=N.eye(F+1), sigma_noise=0.1, regression=True)
    b.states.enable("predicted_variances")
    b.train(dataset_train)
    predictions = b.predict(dataset_test.samples)
    print "Predicted slope and intercept:",b.w
    
    if F==1:
        pylab.plot(dataset_train.samples,dataset_train.labels,"ro",label="train")
        
        pylab.plot(dataset_test.samples,predictions,"b-",label="prediction")
        pylab.plot(dataset_test.samples,predictions+N.sqrt(b.predicted_variances),"b--",label="pred(+/-)std")
        pylab.plot(dataset_test.samples,predictions-N.sqrt(b.predicted_variances),"b--",label=None)
        # pylab.plot(dataset_test.samples,dataset_test.labels,"go")
        pylab.legend()
        pylab.xlabel("samples")
        pylab.ylabel("labels")
        pylab.title("Bayesian Linear Regression on dataset 'linear_AWGN'")
        pass