// Copyright (C) 2008  Davis E. King (davisking@users.sourceforge.net)
// License: Boost Software License   See LICENSE.txt for the full license.
#undef DLIB_KCENTROId_ABSTRACT_
#ifdef DLIB_KCENTROId_ABSTRACT_

#include <cmath>
#include "../matrix/matrix_abstract.h"
#include "../algs.h"
#include "../serialize.h"
#include "kernel_abstract.h"

namespace dlib
{

    template <
        typename kernel_type
        >
    class kcentroid
    {
        /*!
            REQUIREMENTS ON kernel_type
                is a kernel function object as defined in dlib/svm/kernel_abstract.h 

            INITIAL VALUE
                - dictionary_size() == 0
                - samples_trained() == 0

            WHAT THIS OBJECT REPRESENTS
                This is an implementation of an online algorithm for recursively estimating the
                centroid of a sequence of training points.  It uses the sparsification technique
                described in the paper The Kernel Recursive Least Squares Algorithm by Yaakov Engel.

                This object then allows you to compute the distance between the centroid 
                and any test points.  So you can use this object to predict how similar a test
                point is to the data this object has been trained on (larger distances from the
                centroid indicate dissimilarity/anomalous points).

                Also note that the algorithm internally keeps a set of "dictionary vectors" 
                that are used to represent the centroid it is estimating.  You can force the 
                algorithm to use no more than a set number of vectors by setting 
                the 3rd constructor argument to whatever you want.  However, note that 
                doing this causes the algorithm to bias its results towards more 
                recent training examples.  
        !*/
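
        /*!
            EXAMPLE (for illustration only, not part of the interface below.  It
            assumes dlib's radial_basis_kernel and matrix types, and the kernel
            width, tolerance, and dictionary size used here are arbitrary example
            values):

                typedef matrix<double,2,1> sample_type;
                typedef radial_basis_kernel<sample_type> kernel_type;

                // keep at most 20 dictionary vectors and use a 0.01 tolerance
                kcentroid<kernel_type> center(kernel_type(0.1), 0.01, 20);

                sample_type s;
                s = 1, 2;      center.train(s);
                s = 1.2, 2.1;  center.train(s);

                // distance in feature space from the learned centroid.  A large
                // value suggests the test point is unlike the training data.
                s = 50, -3;
                double dist = center(s);
        !*/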

    public:
        typedef typename kernel_type::scalar_type scalar_type;
        typedef typename kernel_type::sample_type sample_type;
        typedef typename kernel_type::mem_manager_type mem_manager_type;


        explicit kcentroid (
            const kernel_type& kernel_, 
            scalar_type tolerance_ = 0.001,
            unsigned long max_dictionary_size_ = 1000000
        );
        /*!
            ensures
                - this object is properly initialized
                - #tolerance() == tolerance_
                - #get_kernel() == kernel_
                - #max_dictionary_size() == max_dictionary_size_
        !*/

        const kernel_type& get_kernel (
        ) const;
        /*!
            ensures
                - returns a const reference to the kernel used by this object
        !*/

        unsigned long max_dictionary_size(
        ) const;
        /*!
            ensures
                - returns the maximum number of dictionary vectors this object
                  will use at a time.  That is, dictionary_size() will never be
                  greater than max_dictionary_size().
        !*/

        scalar_type samples_trained (
        ) const;
        /*!
            ensures
                - returns the number of samples this object has been trained on so far
        !*/

        scalar_type tolerance(
        ) const;
        /*!
            ensures
                - returns the tolerance to use for the approximately linearly dependent 
                  test used for sparsification (see the KRLS paper for details).  This is 
                  a number which governs how accurately this object will approximate the 
                  centroid it is learning.  Smaller values generally result in a more accurate 
                  estimate while also resulting in a bigger set of support vectors in 
                  the learned dictionary.  Bigger tolerance values result in a 
                  less accurate estimate but also in fewer support vectors.
        !*/

        void clear_dictionary (
        );
        /*!
            ensures
                - clears out all learned data (e.g. #dictionary_size() == 0)
                - #samples_trained() == 0
        !*/

        scalar_type operator() (
            const sample_type& x
        ) const;
        /*!
            ensures
                - returns the distance in feature space between the sample x and the
                  current estimate of the centroid of the training samples given
                  to this object so far.
        !*/

        void train (
            const sample_type& x
        );
        /*!
            ensures
                - adds the sample x into the current estimate of the centroid
                - also note that calling this function is equivalent to calling
                  train(x, samples_trained()/(samples_trained()+1.0), 1.0/(samples_trained()+1.0)).  
                  That is, this function finds the normal unweighted centroid of all training points.
        !*/
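
        /*!
            For illustration only: the equivalence above means train() maintains a
            plain running mean.  After n samples the update performed is
                new_centroid = (n/(n+1))*old_centroid + (1/(n+1))*x
            so training on x1, x2, x3 leaves the centroid at (x1 + x2 + x3)/3 in
            feature space, up to the approximation error controlled by tolerance().
        !*/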

        void train (
            const sample_type& x,
            double cscale,
            double xscale
        );
        /*!
            ensures
                - adds the sample x into the current estimate of the centroid but
                  uses user given scaling factors.  That is, this function performs:
                    - new_centroid = cscale*old_centroid + xscale*x
                - This function allows you to weight different samples however 
                  you want.
        !*/
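
        /*!
            For illustration only: one common use of this overload is to make the
            centroid exponentially forget old samples so it can track data whose
            distribution drifts over time.  The decay rate below (0.99) is an
            arbitrary example value and center is whatever kcentroid object you
            are training:

                // new_centroid = 0.99*old_centroid + 0.01*x
                center.train(x, 0.99, 0.01);
        !*/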

        scalar_type test_and_train (
            const sample_type& x
        );
        /*!
            ensures
                - calls train(x)
                - returns (*this)(x)
                - The reason this function exists is because train() and operator() 
                  both compute some of the same things.  So this function is more efficient
                  than calling both individually.
        !*/

        scalar_type test_and_train (
            const sample_type& x,
            double cscale,
            double xscale
        );
        /*!
            ensures
                - calls train(x,cscale,xscale)
                - returns (*this)(x)
                - The reason this function exists is because train() and operator() 
                  both compute some of the same things.  So this function is more efficient
                  than calling both individually.
        !*/
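
        /*!
            For illustration only: a minimal online novelty detection loop built on
            test_and_train().  The samples container (e.g. a std::vector<sample_type>)
            and the threshold value are placeholders you would supply yourself:

                for (unsigned long i = 0; i < samples.size(); ++i)
                {
                    const double dist = center.test_and_train(samples[i]);
                    if (dist > threshold)
                        std::cout << "sample " << i << " looks anomalous" << std::endl;
                }
        !*/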

        void swap (
            kcentroid& item
        );
        /*!
            ensures
                - swaps *this with item
        !*/

        unsigned long dictionary_size (
        ) const;
        /*!
            ensures
                - returns the number of "support vectors" in the dictionary.  
        !*/

    };

// ----------------------------------------------------------------------------------------

    template <
        typename kernel_type
        >
    void swap(
        kcentroid<kernel_type>& a, 
        kcentroid<kernel_type>& b
    ) { a.swap(b); }
    /*!
        provides a global swap function
    !*/

    template <
        typename kernel_type
        >
    void serialize (
        const kcentroid<kernel_type>& item,
        std::ostream& out
    );
    /*!
        provides serialization support for kcentroid objects
    !*/

    template <
        typename kernel_type 
        >
    void deserialize (
        kcentroid<kernel_type>& item,
        std::istream& in 
    );
    /*!
        provides deserialization support for kcentroid objects
    !*/
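
    /*!
        For illustration only: saving a trained kcentroid to disk and loading it
        back with the functions declared above.  The file name is a placeholder
        and <fstream> is assumed to be included:

            std::ofstream fout("kcentroid.dat", std::ios::binary);
            serialize(center, fout);
            fout.close();

            std::ifstream fin("kcentroid.dat", std::ios::binary);
            deserialize(center, fin);
    !*/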

// ----------------------------------------------------------------------------------------

}

#endif // DLIB_KCENTROId_ABSTRACT_