/usr/include/shogun/structure/StochasticSOSVM.h is in libshogun-dev 3.2.0-7.3build4.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* Written (W) 2013 Shell Hu
* Copyright (C) 2013 Shell Hu
*/
#ifndef __STOCHASTIC_SOSVM_H__
#define __STOCHASTIC_SOSVM_H__
#include <shogun/lib/SGVector.h>
#include <shogun/machine/LinearStructuredOutputMachine.h>
namespace shogun
{
/** @brief Class CStochasticSOSVM solves SOSVM using stochastic subgradient descent
* on the SVM primal problem [1], which is equivalent to SGD or Pegasos [2].
* This class is inspired by the matlab SGD implementation in [3].
*
* [1] N. Ratliff, J. A. Bagnell, and M. Zinkevich. (online) subgradient methods
* for structured prediction. AISTATS, 2007.
* [2] S. Shalev-Shwartz, Y. Singer, N. Srebro. Pegasos: Primal Estimated
* sub-GrAdient SOlver for SVM. ICML 2007.
* [3] S. Lacoste-Julien, M. Jaggi, M. Schmidt and P. Pletscher. Block-Coordinate
* Frank-Wolfe Optimization for Structural SVMs. ICML 2013.
*/
class CStochasticSOSVM : public CLinearStructuredOutputMachine
{
public:
	/** default constructor */
	CStochasticSOSVM();

	/** standard constructor
	 *
	 * @param model structured model with application specific functions
	 * @param labs structured labels
	 * @param do_weighted_averaging whether mix w with previous average weights
	 * @param verbose whether compute debug information, such as primal value, duality gap etc.
	 */
	CStochasticSOSVM(CStructuredModel* model, CStructuredLabels* labs,
		bool do_weighted_averaging = true, bool verbose = false);

	/** destructor
	 *
	 * Declared virtual: instances are deleted through base-class pointers
	 * in Shogun's ref-counted object model, so polymorphic destruction
	 * must be guaranteed.
	 */
	virtual ~CStochasticSOSVM();

	/** @return name of SGSerializable */
	virtual const char* get_name() const { return "StochasticSOSVM"; }

	/** get classifier type
	 *
	 * @return classifier type CT_STOCHASTICSOSVM
	 */
	virtual EMachineType get_classifier_type();

	/** @return regularization constant lambda */
	float64_t get_lambda() const;

	/** set regularization const
	 *
	 * @param lbda regularization const lambda
	 */
	void set_lambda(float64_t lbda);

	/** @return max number of training iterations (passes through the data) */
	int32_t get_num_iter() const;

	/** set max number of iterations
	 *
	 * @param num_iter number of iterations
	 */
	void set_num_iter(int32_t num_iter);

	/** @return debug multiplier controlling logging frequency */
	int32_t get_debug_multiplier() const;

	/** set frequency of debug outputs
	 *
	 * @param multiplier debug multiplier
	 */
	void set_debug_multiplier(int32_t multiplier);

	/** @return random seed used to shuffle training examples */
	uint32_t get_rand_seed() const;

	/** set random seed
	 *
	 * @param rand_seed random seed
	 */
	void set_rand_seed(uint32_t rand_seed);

protected:
	/** train primal SO-SVM with stochastic subgradient descent
	 *
	 * @param data training data
	 * @return whether the training was successful
	 */
	virtual bool train_machine(CFeatures* data = NULL);

private:
	/** register and initialize parameters */
	void init();

private:
	/** The regularization constant (default: 1/n) */
	float64_t m_lambda;

	/** Number of passes through the data (default: 50) */
	int32_t m_num_iter;

	/** Whether to use weighted averaging of the iterates */
	bool m_do_weighted_averaging;

	/** random seed */
	uint32_t m_rand_seed;

	/** If set to 0, the algorithm computes the objective after each full
	 * pass trough the data. If in (0,100) logging happens at a
	 * geometrically increasing sequence of iterates, thus allowing for
	 * within-iteration logging. The smaller the number, the more
	 * costly the computations will be!
	 */
	int32_t m_debug_multiplier;

}; /* CStochasticSOSVM */
} /* namespace shogun */
#endif
|