SHOGUN  6.1.3
SMIDASMinimizer.cpp
Go to the documentation of this file.
1  /*
2  * Copyright (c) The Shogun Machine Learning Toolbox
3  * Written (w) 2015 Wu Lin
4  * All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions are met:
8  *
9  * 1. Redistributions of source code must retain the above copyright notice, this
10  * list of conditions and the following disclaimer.
11  * 2. Redistributions in binary form must reproduce the above copyright notice,
12  * this list of conditions and the following disclaimer in the documentation
13  * and/or other materials provided with the distribution.
14  *
15  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
16  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
17  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
18  * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
19  * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
20  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
21  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
22  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
24  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  *
26  * The views and conclusions contained in the software and documentation are those
27  * of the authors and should not be interpreted as representing official policies,
28  * either expressed or implied, of the Shogun Development Team.
29  *
30  */
#include <shogun/optimization/SMIDASMinimizer.h>
#include <shogun/lib/config.h>
#include <shogun/optimization/GradientDescendUpdater.h>
#include <shogun/optimization/L1Penalty.h>
#include <shogun/base/Parameter.h>
36 using namespace shogun;
37 
39  :SMDMinimizer()
40 {
41  init();
42 }
43 
45 {
46 }
47 
49  :SMDMinimizer(fun)
50 {
51  init();
52 }
53 
55 {
56  REQUIRE(m_mapping_fun, "Mapping function must set\n");
59 
60  if(m_dual_variable.vlen==0)
61  m_dual_variable=m_mapping_fun->get_dual_variable(variable_reference);
62  else
63  {
64  REQUIRE(m_dual_variable.vlen==variable_reference.vlen,
65  "The length (%d) of dual variable must match the length (%d) of variable\n",
66  m_dual_variable.vlen, variable_reference.vlen);
67  }
68  L1Penalty* penalty_type=dynamic_cast<L1Penalty*>(m_penalty_type);
69  REQUIRE(penalty_type,"For now only L1Penalty is supported. Please use the penalty for this minimizer\n");
70 
72  REQUIRE(fun,"the cost function must be a stochastic cost function\n");
74  {
75  fun->begin_sample();
76  while(fun->next_sample())
77  {
80 
84  m_mapping_fun->update_variable(variable_reference, m_dual_variable);
85  }
86  }
87  float64_t cost=m_fun->get_cost();
88  return cost+get_penalty(variable_reference);
89 }
90 
91 void SMIDASMinimizer::init()
92 {
94  SG_ADD(&m_dual_variable, "SMIDASMinimizer__m_dual_variable",
95  "dual_variable in SMIDASMinimizer", MS_NOT_AVAILABLE);
96 }
97 
99 {
103 
104  if(updater)
105  {
106  if (updater->enables_descend_correction())
107  {
108  SG_SWARNING("There is not theoretical guarantee when Descend Correction is enabled\n");
109  }
110  GradientDescendUpdater* gradient_updater=
112  if(!gradient_updater)
113  {
114  SG_SWARNING("There is not theoretical guarantee when this updater is used\n");
115  }
116  }
117  else
118  {
119  SG_SWARNING("There is not theoretical guarantee when this updater is used\n");
120  }
121  REQUIRE(m_learning_rate,"Learning Rate instance must set\n");
122 }
virtual SGVector< float64_t > get_gradient()=0
The class implements the stochastic mirror descend (SMD) minimizer.
Definition: SMDMinimizer.h:43
virtual void update_variable(SGVector< float64_t > variable, SGVector< float64_t > dual_variable)=0
This is the base class for L1 penalty/regularization within the FirstOrderMinimizer framework...
Definition: L1Penalty.h:51
#define SG_SWARNING(...)
Definition: SGIO.h:163
FirstOrderCostFunction * m_fun
#define REQUIRE(x,...)
Definition: SGIO.h:181
virtual void init_minimization()
virtual void update_variable(SGVector< float64_t > variable_reference, SGVector< float64_t > negative_descend_direction, float64_t learning_rate)=0
The first order stochastic cost function base class.
virtual float64_t minimize()
MappingFunction * m_mapping_fun
Definition: SMDMinimizer.h:79
virtual void init_minimization()
virtual SGVector< float64_t > get_dual_variable(SGVector< float64_t > variable)=0
double float64_t
Definition: common.h:60
virtual float64_t get_penalty(SGVector< float64_t > var)
virtual float64_t get_cost()=0
virtual void update_variable_for_proximity(SGVector< float64_t > variable, float64_t proximal_weight)
Definition: L1Penalty.cpp:47
The class implements the gradient descend method.
all of classes and functions are contained in the shogun namespace
Definition: class_list.h:18
SGVector< float64_t > m_dual_variable
This is a base class for descend update with descend based correction.
virtual float64_t get_learning_rate(int32_t iter_counter)=0
#define SG_ADD(...)
Definition: SGObject.h:93
virtual SGVector< float64_t > obtain_variable_reference()=0
index_t vlen
Definition: SGVector.h:571

SHOGUN Machine Learning Toolbox - Documentation