This file is indexed.

/usr/include/dlib/svm/kcentroid_abstract.h is in libdlib-dev 18.18-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

// Copyright (C) 2008  Davis E. King (davis@dlib.net)
// License: Boost Software License   See LICENSE.txt for the full license.
#undef DLIB_KCENTROId_ABSTRACT_
#ifdef DLIB_KCENTROId_ABSTRACT_

#include "../algs.h"
#include "../serialize.h"
#include "kernel_abstract.h"

namespace dlib
{

    template <
        typename kernel_type
        >
    class kcentroid
    {
        /*!
            REQUIREMENTS ON kernel_type
                is a kernel function object as defined in dlib/svm/kernel_abstract.h 

            INITIAL VALUE
                - dictionary_size() == 0
                - samples_trained() == 0

            WHAT THIS OBJECT REPRESENTS
                This object represents a weighted sum of sample points in a kernel induced
                feature space.  It can be used to kernelize any algorithm that requires only
                the ability to perform vector addition, subtraction, scalar multiplication,
                and inner products.  

                An example use of this object is as an online algorithm for recursively estimating 
                the centroid of a sequence of training points.  This object then allows you to 
                compute the distance between the centroid and any test points.  So you can use 
                this object to predict how similar a test point is to the data this object has 
                been trained on (larger distances from the centroid indicate dissimilarity/anomalous 
                points).  

                Also note that the algorithm internally keeps a set of "dictionary vectors" 
                that are used to represent the centroid.  You can force the algorithm to use 
                no more than a set number of vectors by setting the 3rd constructor argument 
                to whatever you want.  

                This object uses the sparsification technique described in the paper The 
                Kernel Recursive Least Squares Algorithm by Yaakov Engel.  This technique
                allows us to keep the number of dictionary vectors down to a minimum.  In fact,
                the object has a user selectable tolerance parameter that controls the trade off
                between accuracy and number of stored dictionary vectors.
        !*/

    public:
        typedef typename kernel_type::scalar_type scalar_type;
        typedef typename kernel_type::sample_type sample_type;
        typedef typename kernel_type::mem_manager_type mem_manager_type;

        kcentroid (
        );
        /*!
            ensures
                - this object is properly initialized
                - #tolerance() == 0.001 
                - #get_kernel() == kernel_type() (i.e. whatever the kernel's default value is) 
                - #max_dictionary_size() == 1000000
                - #remove_oldest_first() == false 
        !*/

        explicit kcentroid (
            const kernel_type& kernel_, 
            scalar_type tolerance_ = 0.001,
            unsigned long max_dictionary_size_ = 1000000,
            bool remove_oldest_first_ = false 
        );
        /*!
            requires
                - tolerance_ > 0
                - max_dictionary_size_ > 1
            ensures
                - this object is properly initialized
                - #tolerance() == tolerance_
                - #get_kernel() == kernel_
                - #max_dictionary_size() == max_dictionary_size_
                - #remove_oldest_first() == remove_oldest_first_
        !*/
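
        /*!
            EXAMPLE (editor's sketch, not part of the original header; assumes
            dlib::matrix and dlib::radial_basis_kernel, both available via
            <dlib/svm.h>):

                #include <dlib/svm.h>

                int main()
                {
                    typedef dlib::matrix<double,2,1> sample_type;
                    typedef dlib::radial_basis_kernel<sample_type> kernel_type;

                    // gamma = 0.1, tolerance = 0.01, at most 15 dictionary vectors,
                    // and discard the most linearly dependent vector when the cap is hit
                    dlib::kcentroid<kernel_type> kc(kernel_type(0.1), 0.01, 15, false);

                    sample_type m;
                    m(0) = 1.0;  m(1) = 2.0;
                    kc.train(m);              // fold m into the centroid estimate
                    double dist = kc(m);      // distance from m to the centroid
                }
        !*/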

        const kernel_type& get_kernel (
        ) const;
        /*!
            ensures
                - returns a const reference to the kernel used by this object
        !*/

        unsigned long max_dictionary_size(
        ) const;
        /*!
            ensures
                - returns the maximum number of dictionary vectors this object will 
                  use at a time.  That is, dictionary_size() will never be greater 
                  than max_dictionary_size().
        !*/

        bool remove_oldest_first (
        ) const;
        /*!
            ensures
                - When the maximum dictionary size is reached this object sometimes
                  needs to discard dictionary vectors when new samples are added via
                  one of the train functions.  When this happens this object chooses 
                  the dictionary vector to discard based on the setting of the
                  remove_oldest_first() parameter.
                - if (remove_oldest_first() == true) then
                    - This object discards the oldest dictionary vectors when necessary.  
                      This is an appropriate mode when using this object in an online
                      setting and the input training samples come from a slowly 
                      varying distribution.
                - else (remove_oldest_first() == false) then
                    - This object discards the most linearly dependent dictionary vectors 
                      when necessary.  This is the default behavior and should be used 
                      in most cases.
        !*/
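
        /*!
            EXAMPLE (editor's sketch, not part of the original header): when the
            training samples come from a slowly drifting distribution, it can make
            sense to cap the dictionary and always discard the oldest vectors via
            the 4th constructor argument.  kernel_type is assumed to be as in the
            constructor example above.

                // keep at most 64 dictionary vectors, dropping the oldest first
                dlib::kcentroid<kernel_type> kc(kernel_type(0.1), 0.01, 64, true);
        !*/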

        unsigned long dictionary_size (
        ) const;
        /*!
            ensures
                - returns the number of basis vectors in the dictionary.  These are
                  the basis vectors used by this object to represent a point in kernel
                  feature space.
        !*/

        scalar_type samples_trained (
        ) const;
        /*!
            ensures
                - returns the number of samples this object has been trained on so far
        !*/

        scalar_type tolerance(
        ) const;
        /*!
            ensures
                - returns the tolerance to use for the approximately linearly dependent 
                  test used for sparsification (see the KRLS paper for details).  This is 
                  a number which governs how accurately this object will approximate the 
                  centroid it is learning.  Smaller values generally result in a more 
                  accurate estimate while also resulting in a bigger set of vectors in 
                  the dictionary.  Bigger tolerance values result in a less accurate 
                  estimate but also in fewer dictionary vectors.  (Note that in any case, 
                  the max_dictionary_size() limits the number of dictionary vectors no 
                  matter the setting of the tolerance)
                - The exact meaning of the tolerance parameter is the following: 
                  Imagine that we have an empirical_kernel_map that contains all
                  the current dictionary vectors.  Then the tolerance is the minimum
                  projection error (as given by empirical_kernel_map::project()) required
                  to cause us to include a new vector in the dictionary.  So each time
                  you call train() the kcentroid basically just computes the projection
                  error for that new sample and if it is larger than the tolerance
                  then that new sample becomes part of the dictionary.
        !*/
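
        /*!
            EXAMPLE (editor's sketch, not part of the original header): a looser
            tolerance trades accuracy for a smaller dictionary.  samples is a
            hypothetical std::vector<sample_type>; kernel_type and sample_type are
            as in the constructor example above.

                dlib::kcentroid<kernel_type> coarse(kernel_type(0.1), 0.1,   1000);
                dlib::kcentroid<kernel_type> fine  (kernel_type(0.1), 0.001, 1000);
                for (const sample_type& s : samples)
                {
                    coarse.train(s);
                    fine.train(s);
                }
                // typically coarse.dictionary_size() <= fine.dictionary_size(),
                // and both are always <= max_dictionary_size()
        !*/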

        void clear_dictionary (
        );
        /*!
            ensures
                - clears out all learned data (e.g. #dictionary_size() == 0)
                - #samples_trained() == 0
        !*/

        scalar_type operator() (
            const kcentroid& x
        ) const;
        /*!
            requires
                - x.get_kernel() == get_kernel()
            ensures
                - returns the distance in kernel feature space between this centroid and the
                  centroid represented by x.  
        !*/

        scalar_type operator() (
            const sample_type& x
        ) const;
        /*!
            ensures
                - returns the distance in kernel feature space between the sample x and the
                  current estimate of the centroid of the training samples given
                  to this object so far.
        !*/
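
        /*!
            EXAMPLE (editor's sketch, not part of the original header): after
            training, this operator can serve as a simple anomaly score, since
            samples far from the centroid are unlike the training data.  kc,
            test_point, and threshold are hypothetical.

                double score = kc(test_point);
                if (score > threshold)
                    std::cout << "test_point looks anomalous" << std::endl;
        !*/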

        scalar_type inner_product (
            const sample_type& x
        ) const;
        /*!
            ensures
                - returns the inner product of the given x point and the current
                  estimate of the centroid of the training samples given to this object
                  so far.
        !*/

        scalar_type inner_product (
            const kcentroid& x
        ) const;
        /*!
            requires
                - x.get_kernel() == get_kernel()
            ensures
                - returns the inner product between x and this centroid object.
        !*/

        scalar_type squared_norm (
        ) const;
        /*!
            ensures
                - returns the squared norm of the centroid vector represented by this
                  object.  I.e. returns this->inner_product(*this)
        !*/
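
        /*!
            EXAMPLE (editor's sketch, not part of the original header): the feature
            space distance between two hypothetical kcentroid objects a and b (built
            with the same kernel) can be recovered from these functions, since
            ||a-b||^2 == <a,a> + <b,b> - 2*<a,b>.

                double d2 = a.squared_norm() + b.squared_norm() - 2*a.inner_product(b);
                // std::sqrt(d2) equals a(b), up to rounding error
        !*/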

        void train (
            const sample_type& x
        );
        /*!
            ensures
                - adds the sample x into the current estimate of the centroid
                - also note that calling this function is equivalent to calling
                  train(x, samples_trained()/(samples_trained()+1.0), 1.0/(samples_trained()+1.0)).  
                  That is, this function finds the normal unweighted centroid of all training points.
        !*/
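
        /*!
            EXAMPLE (editor's sketch, not part of the original header): calling
            train(x) once per sample yields the running, equally weighted centroid
            of everything seen so far.  samples is a hypothetical
            std::vector<sample_type> and kc a hypothetical kcentroid.

                for (const sample_type& s : samples)
                    kc.train(s);
                // kc now approximates the mean of samples in kernel feature space
        !*/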

        void train (
            const sample_type& x,
            scalar_type cscale,
            scalar_type xscale
        );
        /*!
            ensures
                - adds the sample x into the current estimate of the centroid but
                  uses a user given scale.  That is, this function performs:
                    - new_centroid = cscale*old_centroid + xscale*x
                - This function allows you to weight different samples however 
                  you want.
        !*/
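
        /*!
            EXAMPLE (editor's sketch, not part of the original header): the
            cscale/xscale form can implement an exponentially weighted moving
            centroid, which is useful when the data distribution drifts.  alpha is
            a hypothetical forgetting rate; kc and x are hypothetical.

                const double alpha = 0.01;
                kc.train(x, 1.0 - alpha, alpha);   // new_centroid = 0.99*old_centroid + 0.01*x
        !*/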

        void scale_by (
            scalar_type cscale
        );
        /*!
            ensures
                - multiplies the current centroid vector by the given scale value.  
                  This function is equivalent to calling train(some_x_value, cscale, 0).
                  So it performs:   
                    - new_centroid == cscale*old_centroid
        !*/

        scalar_type test_and_train (
            const sample_type& x
        );
        /*!
            ensures
                - calls train(x)
                - returns (*this)(x)
                - This function exists because train() and operator() share some of
                  the same computations, so calling it is more efficient than calling
                  both individually.
        !*/

        scalar_type test_and_train (
            const sample_type& x,
            scalar_type cscale,
            scalar_type xscale
        );
        /*!
            ensures
                - calls train(x,cscale,xscale)
                - returns (*this)(x)
                - This function exists because train() and operator() share some of
                  the same computations, so calling it is more efficient than calling
                  both individually.
        !*/
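
        /*!
            EXAMPLE (editor's sketch, not part of the original header): an online
            novelty detector that scores each incoming sample against the centroid
            of the previous ones while also folding it into the estimate.  stream,
            threshold, and report_novelty are hypothetical.

                for (const sample_type& s : stream)
                {
                    double dist = kc.test_and_train(s);
                    if (dist > threshold)
                        report_novelty(s);
                }
        !*/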

        void swap (
            kcentroid& item
        );
        /*!
            ensures
                - swaps *this with item
        !*/

        distance_function<kernel_type> get_distance_function (
        ) const;
        /*!
            ensures
                - returns a distance function F that represents the point learned
                  by this object so far.  I.e. it is the case that:
                    - for all x: F(x) == (*this)(x)
        !*/
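
        /*!
            EXAMPLE (editor's sketch, not part of the original header): the learned
            point can be exported as a dlib::distance_function, a small stand-alone
            object that is convenient to store or pass to other code.  kc and
            test_point are hypothetical.

                dlib::distance_function<kernel_type> df = kc.get_distance_function();
                double d = df(test_point);   // same value as kc(test_point)
        !*/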


    };

// ----------------------------------------------------------------------------------------

    template <
        typename kernel_type
        >
    void swap(
        kcentroid<kernel_type>& a, 
        kcentroid<kernel_type>& b
    ) { a.swap(b); }
    /*!
        provides a global swap function
    !*/

    template <
        typename kernel_type
        >
    void serialize (
        const kcentroid<kernel_type>& item,
        std::ostream& out
    );
    /*!
        provides serialization support for kcentroid objects
    !*/

    template <
        typename kernel_type 
        >
    void deserialize (
        kcentroid<kernel_type>& item,
        std::istream& in 
    );
    /*!
        provides serialization support for kcentroid objects
    !*/
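
    /*!
        EXAMPLE (editor's sketch, not part of the original header): a trained
        kcentroid can be written to disk and restored with these functions.  The
        file name is hypothetical; kc and kernel_type are as in the examples
        above, and <fstream> is assumed to be included.

            std::ofstream fout("kc.dat", std::ios::binary);
            dlib::serialize(kc, fout);
            fout.close();

            dlib::kcentroid<kernel_type> kc2;
            std::ifstream fin("kc.dat", std::ios::binary);
            dlib::deserialize(kc2, fin);
            // kc2 now behaves like kc
    !*/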

// ----------------------------------------------------------------------------------------

}

#endif // DLIB_KCENTROId_ABSTRACT_