#ifndef DOXYGEN_SHOULD_SKIP_THIS

/** Cost-function wrapper used internally by CGradientModelSelection: it exposes
 * the model-selection objective and its gradient through the FirstOrderCostFunction
 * interface so that any FirstOrderMinimizer can drive the search. Code elided from
 * this excerpt is marked with "...". */
class GradientModelSelectionCostFunction : public FirstOrderCostFunction
{
public:
    virtual ~GradientModelSelectionCostFunction() { SG_UNREF(m_obj); }

    void unset_target(bool is_unref)
    {
        // ...
    }

    virtual float64_t get_cost()
    {
        REQUIRE(m_obj, "Object not set\n");
        return m_obj->get_cost(m_val, m_grad, m_func_data);
    }

    virtual SGVector<float64_t> obtain_variable_reference()
    {
        REQUIRE(m_obj, "Object not set\n");
        return m_val;
    }

    virtual SGVector<float64_t> get_gradient()
    {
        REQUIRE(m_obj, "Object not set\n");
        return m_grad;
    }

    virtual const char* get_name() const { return "GradientModelSelectionCostFunction"; }

    virtual void set_func_data(void *func_data)
    {
        REQUIRE(func_data != NULL, "func_data must be set\n");
        m_func_data = func_data;
    }

private:
    virtual void init()
    {
        // register members for serialization (descriptions elided in this excerpt)
        SG_ADD((CSGObject **)&m_obj, "GradientModelSelectionCostFunction__m_obj", /* ... */);
        SG_ADD(m_val, "GradientModelSelectionCostFunction__m_val", /* ... */);
        SG_ADD(m_grad, "GradientModelSelectionCostFunction__m_grad", /* ... */);
    }
    // ...
};

#endif /* DOXYGEN_SHOULD_SKIP_THIS */
float64_t CGradientModelSelection::get_cost(SGVector<float64_t> model_vars,
    SGVector<float64_t> model_grads, void* func_data)
{
    REQUIRE(func_data!=NULL, "func_data must be set\n");
    REQUIRE(model_vars.vlen==model_grads.vlen,
        "Length of the variables (%d) and of the gradient (%d) must be equal\n",
        model_vars.vlen, model_grads.vlen);

    nlopt_params* params=(nlopt_params*)func_data;
    bool print_state=params->print_state;
    // current_combination and parameter_dictionary are also unpacked from params

    // write the entries of model_vars back into the current parameter combination
    index_t offset=0;
    for (index_t i=0; i<parameter_dictionary->get_num_elements(); i++)
    {
        CMapNode<TParameter*, CSGObject*>* node=parameter_dictionary->get_node_ptr(i);
        TParameter* param=node->key;
        CSGObject* parent=node->data;

        if (/* parameter is vector-valued */)
        {
            for (index_t j=0; /* ... */; j++)
            {
                bool result=current_combination->set_parameter(param->m_name,
                    model_vars[offset++], parent, j);
                REQUIRE(result, "Parameter %s not found in combination tree\n",
                    param->m_name);
            }
        }
        else
        {
            bool result=current_combination->set_parameter(param->m_name,
                model_vars[offset++], parent);
            REQUIRE(result, "Parameter %s not found in combination tree\n",
                param->m_name);
        }
    }

    // apply the combination to the machine, evaluate it, and obtain the function
    // value plus the "gradient" and "gradient_dictionary" maps from the result
    CMachine* machine=m_machine_eval->get_machine();
    // ...
    if (m_machine_eval->get_evaluation_direction()==ED_MINIMIZE)
    { /* ... */ }

    // collect the derivative wrt every parameter in the dictionary
    offset=0;
    for (index_t i=0; i<parameter_dictionary->get_num_elements(); i++)
    {
        CMapNode<TParameter*, CSGObject*>* node=parameter_dictionary->get_node_ptr(i);
        SGVector<float64_t> derivative;

        for (index_t j=0; j<gradient_dictionary->get_num_elements(); j++)
        {
            CMapNode<TParameter*, CSGObject*>* gradient_node=
                gradient_dictionary->get_node_ptr(j);
            if (gradient_node->data==node->data &&
                !strcmp(gradient_node->key->m_name, node->key->m_name))
            {
                derivative=gradient->get_element(gradient_node->key);
            }
        }

        REQUIRE(derivative.vlen, "Can't find gradient wrt %s parameter!\n",
            node->key->m_name);
        // copy derivative into model_grads at the current offset, then advance it
        offset+=derivative.vlen;
    }

    // flip the gradient sign when the evaluation is maximized
    if (m_machine_eval->get_evaluation_direction()==ED_MINIMIZE)
    { /* ... */ }
    else
    {
        model_grads.scale(-1);
    }
    // ...
}
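/* The definition of nlopt_params is not part of this excerpt. Judging from the
 * fields accessed in get_cost() above and filled in select_model() below, it
 * presumably bundles the state the cost function needs on every evaluation,
 * roughly along these lines:
 *
 *   struct nlopt_params
 *   {
 *       CParameterCombination* current_combination;           // combination updated per call
 *       CMap<TParameter*, CSGObject*>* parameter_dictionary;  // parameter -> owning object
 *       bool print_state;                                     // verbose per-iteration output
 *   };
 */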
void CGradientModelSelection::set_minimizer(FirstOrderMinimizer* minimizer)
{
    REQUIRE(minimizer!=NULL, "Minimizer must be set\n");
    // ...
    m_mode_minimizer=minimizer;
}
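/* Hedged usage sketch for set_minimizer(): callers can swap the optimizer for any
 * FirstOrderMinimizer implementation. CLBFGSMinimizer is used purely as an example
 * here, and the construction of grad_search is abbreviated.
 *
 *   CGradientModelSelection* grad_search=new CGradientModelSelection( ... );
 *   FirstOrderMinimizer* opt=new CLBFGSMinimizer();
 *   grad_search->set_minimizer(opt);
 */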
void CGradientModelSelection::init()
{
    // ...
}

CParameterCombination* CGradientModelSelection::select_model(bool print_state)
{
    // ...
    SG_REF(current_combination);
    // ...
    // unroll the current parameter combination into the flat vector model_vars
    for (index_t i=0; i<argument->get_num_elements(); i++)
    {
        CMapNode<TParameter*, SGVector<float64_t> >* node=argument->get_node_ptr(i);
        // ...
        offset+=node->data.vlen;
    }
    // ...

    // bookkeeping handed to the cost function on every evaluation
    params.current_combination=current_combination;
    params.print_state=print_state;
    params.parameter_dictionary=parameter_dictionary;

    if (/* ... */)
        SG_PRINT("Minimizing objective function:\n");
    else
        SG_PRINT("Maximizing objective function:\n");

    // wire the wrapper cost function to this object and run the minimizer
    cost_fun->set_target(this);
    cost_fun->set_variables(model_vars);
    cost_fun->set_func_data(&params);

    bool cleanup=false;
#ifdef USE_REFERENCE_COUNTING
    if (this->ref_count()>1)
        cleanup=true;
#endif
    // ...
    cost_fun->unset_target(cleanup);
    // ...
    return current_combination;
}
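/* End-to-end sketch of how select_model() is typically driven from user code.
 * This is a hedged example: gp, features and labels are placeholders, and the
 * CGradientEvaluation setup plus the CGradientModelSelection constructor taking
 * a CMachineEvaluation are assumptions rather than code shown in this excerpt.
 *
 *   CGradientEvaluation* grad_eval=new CGradientEvaluation(gp, features, labels,
 *       new CGradientCriterion());
 *   CGradientModelSelection* grad_search=new CGradientModelSelection(grad_eval);
 *   CParameterCombination* best=grad_search->select_model(true);
 *   best->apply_to_machine(gp);   // write the selected parameters back into the machine
 *   SG_UNREF(best);
 *   SG_UNREF(grad_search);
 *   SG_UNREF(grad_eval);
 */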