33 #include <shogun/lib/config.h> 55 REQUIRE(scale>0,
"Scale (%f) must be positive", scale);
89 void CInference::init()
118 REQUIRE(minimizer,
"Minimizer must set\n");
128 int32_t num_importance_samples,
float64_t ridge_size)
135 cov(i,i)+=ridge_size;
161 scaled_kernel(i,i)+=ridge_size;
174 ASSERT(log_likelihood.
vlen==num_importance_samples);
175 ASSERT(log_likelihood.
vlen==log_pdf_prior.vlen);
179 sum[i]=log_likelihood[i]+log_pdf_prior[i]-log_pdf_post_approx[i];
202 #pragma omp parallel for 203 for (
index_t i=0; i<num_deriv; i++)
208 if(node->data ==
this)
213 else if (node->data == this->m_model)
218 else if (node->data ==this->m_kernel)
223 else if (node->data ==this->m_mean)
230 SG_SERROR(
"Can't compute derivative of negative log marginal " 231 "likelihood wrt %s.%s", node->data->get_name(), node->key->m_name);
236 result->
add(node->key, gradient);
253 "Number of training features must be greater than zero\n")
256 "Number of labels must be greater than zero\n")
258 "Number of training vectors (%d) must match number of labels (%d)\n",
virtual bool init(CFeatures *lhs, CFeatures *rhs)
virtual void update_train_kernel()
virtual SGVector< float64_t > get_log_probability_fmatrix(const CLabels *lab, SGMatrix< float64_t > F) const
The class Labels models labels, i.e. class assignments of objects.
virtual int32_t get_num_labels() const =0
void scale(SGVector< T > &a, SGVector< T > &result, T alpha=1)
virtual SGVector< float64_t > log_pdf_multiple(SGMatrix< float64_t > samples) const
CMapNode< K, T > * get_node_ptr(int32_t index)
virtual int32_t get_num_vectors() const =0
virtual void set_scale(float64_t scale)
virtual SGVector< float64_t > get_mean_vector(const CFeatures *features) const =0
SGMatrix< float64_t > m_E
An abstract base class for the mean function.
std::enable_if<!std::is_same< T, complex128_t >::value, float64_t >::type mean(const Container< T > &a)
SGMatrix< float64_t > get_kernel_matrix()
virtual void set_labels(CLabels *lab)
SGMatrix< float64_t > m_ktrtr
virtual SGVector< float64_t > get_derivative_wrt_mean(const TParameter *param)=0
virtual SGMatrix< float64_t > get_posterior_covariance()=0
virtual float64_t get_scale() const
virtual SGMatrix< float64_t > get_multiclass_E()
Class SGObject is the base class of all shogun objects.
int32_t get_num_elements() const
virtual void compute_gradient()
virtual SGMatrix< float64_t > sample(int32_t num_samples, SGMatrix< float64_t > pre_samples=SGMatrix< float64_t >()) const
The class CMap, a map based on a hash table. See: http://en.wikipedia.org/wiki/Hash_table ...
virtual void set_model(CLikelihoodModel *mod)
virtual void set_kernel(CKernel *kern)
virtual SGVector< float64_t > get_derivative_wrt_inference_method(const TParameter *param)=0
SGMatrix< float64_t > m_L
virtual SGVector< float64_t > get_posterior_mean()=0
virtual void register_minimizer(Minimizer *minimizer)
virtual void set_features(CFeatures *feat)
virtual SGVector< float64_t > get_derivative_wrt_kernel(const TParameter *param)=0
All classes and functions are contained in the shogun namespace.
Dense version of the well-known Gaussian probability distribution, defined as \( \mathcal{N}(x;\mu,\Sigma) = \frac{1}{\sqrt{(2\pi)^d \lvert\Sigma\rvert}} \exp\!\left(-\tfrac{1}{2}(x-\mu)^\top \Sigma^{-1} (x-\mu)\right) \).
T sum(const Container< T > &a, bool no_diag=false)
int32_t add(const K &key, const T &data)
float64_t get_marginal_likelihood_estimate(int32_t num_importance_samples=1, float64_t ridge_size=1e-15)
virtual CMap< TParameter *, SGVector< float64_t > > * get_negative_log_marginal_likelihood_derivatives(CMap< TParameter *, CSGObject *> *parameters)
The class Features is the base class of all feature objects.
static float64_t exp(float64_t x)
static float64_t log(float64_t v)
static T log_mean_exp(SGVector< T > values)
The minimizer base class.
CLikelihoodModel * m_model
virtual void check_members() const
virtual bool parameter_hash_changed()
virtual void set_mean(CMeanFunction *m)
The Likelihood model base class.
SGVector< float64_t > m_alpha
virtual SGVector< float64_t > get_derivative_wrt_likelihood_model(const TParameter *param)=0