Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
Fixed a few documentation warnings
  • Loading branch information
lisitsyn committed Aug 13, 2012
1 parent 8c987fd commit d87c378
Show file tree
Hide file tree
Showing 13 changed files with 68 additions and 33 deletions.
2 changes: 1 addition & 1 deletion src/shogun/base/Parameter.h
Expand Up @@ -108,7 +108,7 @@ struct TParameter
* its parameter, but from scratch using allocate_data_from_scratch */
bool m_was_allocated_from_scratch;

/*Incrementally get a hash from parameter value*
/** Incrementally get a hash from parameter value*
*
* @param hash current hash value
* @param carry value for incremental murmur hashing
Expand Down
11 changes: 7 additions & 4 deletions src/shogun/kernel/GaussianARDKernel.h
Expand Up @@ -33,6 +33,7 @@ class CGaussianARDKernel: public CLinearARDKernel
/** constructor
*
* @param size cache size
* @param width kernel width
*/
CGaussianARDKernel(int32_t size, float64_t width);

Expand All @@ -41,10 +42,12 @@ class CGaussianARDKernel: public CLinearARDKernel
* @param l features of left-hand side
* @param r features of right-hand side
* @param size cache size
* @param width kernel width
*/
CGaussianARDKernel(CDenseFeatures<float64_t>* l, CDenseFeatures<float64_t>* r,
int32_t size=10, float64_t width = 2.0);

/** destructor */
virtual ~CGaussianARDKernel();

/** initialize kernel
Expand All @@ -65,20 +68,20 @@ class CGaussianARDKernel: public CLinearARDKernel
*
* @return name GaussianARDKernel
*/
inline virtual const char* get_name() const { return "GaussianARDKernel"; }
virtual const char* get_name() const { return "GaussianARDKernel"; }

/** return derivative with respect to specified parameter
*
* @param param the parameter
* @param param the parameter
* @param obj the object that owns the parameter
* @index index the index of the element if parameter is a vector
* @param index index the index of the element if parameter is a vector
*
* @return gradient with respect to parameter
*/
virtual SGMatrix<float64_t> get_parameter_gradient(TParameter* param,
CSGObject* obj, index_t index = -1);

protected:
protected:

/** compute kernel function for features a and b
* idx_{a,b} denote the index of the feature vectors
Expand Down
6 changes: 6 additions & 0 deletions src/shogun/machine/OnlineLinearMachine.h
Expand Up @@ -233,8 +233,14 @@ class COnlineLinearMachine : public CMachine
*/
virtual bool train_machine(CFeatures* data=NULL);

/** get real outputs
*
* @param data features to compute outputs
* @return outputs
*/
SGVector<float64_t> apply_get_outputs(CFeatures* data);

/** whether training requires labels */
virtual bool train_require_labels() const { return false; }

protected:
Expand Down
3 changes: 3 additions & 0 deletions src/shogun/modelselection/GradientModelSelection.h
Expand Up @@ -111,8 +111,11 @@ class CGradientModelSelection: public CModelSelection
/** Struct used to pass state to the nlopt callback function. */
struct nlopt_package
{
/** machine evaluation object used to score a parameter combination */
shogun::CMachineEvaluation* m_machine_eval;
/** parameter combination currently being evaluated */
shogun::CParameterCombination* m_current_combination;
/** whether to print the current optimization state */
bool print_state;
};

Expand Down
15 changes: 10 additions & 5 deletions src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.h
Expand Up @@ -81,11 +81,16 @@ class CECOCRandomSparseEncoder: public CECOCEncoder
virtual SGMatrix<int32_t> create_codebook(int32_t num_classes);

protected:
int32_t m_maxiter; // max number of iterations
int32_t m_codelen; // code length
float64_t m_pzero; // probability of zero
float64_t m_pposone; // probability of +1
float64_t m_pnegone; // probability of -1
/** maximum number of iterations */
int32_t m_maxiter;
/** code length */
int32_t m_codelen;
/** probability of zero */
float64_t m_pzero;
/** probability of +1 */
float64_t m_pposone;
/** probability of -1 */
float64_t m_pnegone;

private:
/** ensure probability sum to one
Expand Down
18 changes: 18 additions & 0 deletions src/shogun/multiclass/tree/RelaxedTree.h
Expand Up @@ -181,6 +181,7 @@ class CRelaxedTree: public CTreeMachine<RelaxedTreeNodeData>
return CMachine::train(data);
}

/** entry type */
typedef std::pair<std::pair<int32_t, int32_t>, float64_t> entry_t;
protected:
/** apply to one instance.
Expand All @@ -199,25 +200,42 @@ class CRelaxedTree: public CTreeMachine<RelaxedTreeNodeData>
*/
virtual bool train_machine(CFeatures* data);

/** train node */
node_t *train_node(const SGMatrix<float64_t> &conf_mat, SGVector<int32_t> classes);
/** init node */
std::vector<entry_t> init_node(const SGMatrix<float64_t> &global_conf_mat, SGVector<int32_t> classes);
/** train node with initialization */
SGVector<int32_t> train_node_with_initialization(const CRelaxedTree::entry_t &mu_entry, SGVector<int32_t> classes, CSVM *svm);

/** compute score */
float64_t compute_score(SGVector<int32_t> mu, CSVM *svm);
/** color label space */
SGVector<int32_t> color_label_space(CSVM *svm, SGVector<int32_t> classes);
/** evaluate binary model K */
SGVector<float64_t> eval_binary_model_K(CSVM *svm);

/** enforce balance constraints upper */
void enforce_balance_constraints_upper(SGVector<int32_t> &mu, SGVector<float64_t> &delta_neg, SGVector<float64_t> &delta_pos, int32_t B_prime, SGVector<float64_t>& xi_neg_class);
/** enforce balance constraints lower */
void enforce_balance_constraints_lower(SGVector<int32_t> &mu, SGVector<float64_t> &delta_neg, SGVector<float64_t> &delta_pos, int32_t B_prime, SGVector<float64_t>& xi_neg_class);

/** maximum number of iterations */
int32_t m_max_num_iter;
/** A */
float64_t m_A;
/** B */
int32_t m_B;
/** svm C */
float64_t m_svm_C;
/** svm epsilon */
float64_t m_svm_epsilon;
/** kernel */
CKernel *m_kernel;
/** features */
CDenseFeatures<float64_t> *m_feats;
/** machine for confusion matrix computation */
CBaseMulticlassMachine *m_machine_for_confusion_matrix;
/** number of classes */
int32_t m_num_classes;
};

Expand Down
2 changes: 2 additions & 0 deletions src/shogun/multiclass/tree/RelaxedTreeNodeData.h
Expand Up @@ -18,8 +18,10 @@ namespace shogun

struct RelaxedTreeNodeData
{
/** mu */
SGVector<int32_t> mu;

/** print data */
static void print_data(const RelaxedTreeNodeData &data)
{
SG_SPRINT("left=(");
Expand Down
12 changes: 6 additions & 6 deletions src/shogun/regression/GaussianProcessRegression.cpp
Expand Up @@ -150,24 +150,24 @@ CRegressionLabels* CGaussianProcessRegression::apply_regression(CFeatures* data)
if (m_return == GP_RETURN_COV)
{
CRegressionLabels* result =
new CRegressionLabels(getCovarianceVector());
new CRegressionLabels(get_covariance_vector());

return result;
}

if (m_return == GP_RETURN_MEANS)
{
CRegressionLabels* result =
new CRegressionLabels(getMeanVector());
new CRegressionLabels(get_mean_vector());

return result;
}

else
{

SGVector<float64_t> mean_vector = getMeanVector();
SGVector<float64_t> cov_vector = getCovarianceVector();
SGVector<float64_t> mean_vector = get_mean_vector();
SGVector<float64_t> cov_vector = get_covariance_vector();

index_t size = mean_vector.vlen+cov_vector.vlen;

Expand Down Expand Up @@ -195,7 +195,7 @@ bool CGaussianProcessRegression::train_machine(CFeatures* data)
}


SGVector<float64_t> CGaussianProcessRegression::getMeanVector()
SGVector<float64_t> CGaussianProcessRegression::get_mean_vector()
{

SGVector<float64_t> m_alpha = m_method->get_alpha();
Expand Down Expand Up @@ -232,7 +232,7 @@ SGVector<float64_t> CGaussianProcessRegression::getMeanVector()
}


SGVector<float64_t> CGaussianProcessRegression::getCovarianceVector()
SGVector<float64_t> CGaussianProcessRegression::get_covariance_vector()
{

if (!m_data)
Expand Down
6 changes: 2 additions & 4 deletions src/shogun/regression/GaussianProcessRegression.h
Expand Up @@ -142,17 +142,15 @@ class CGaussianProcessRegression : public CMachine

/** get covariance vector
*
* @param data (test)data to be classified
* @return covariance vector
*/
SGVector<float64_t> getCovarianceVector();
SGVector<float64_t> get_covariance_vector();

/** get predicted mean vector
*
* @param data (test)data to be classified
* @return predicted mean vector
*/
SGVector<float64_t> getMeanVector();
SGVector<float64_t> get_mean_vector();

/** @return object name */
inline virtual const char* get_name() const
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/regression/gp/ExactInferenceMethod.h
Expand Up @@ -89,7 +89,7 @@ class CExactInferenceMethod: public CInferenceMethod
* \mu = K\alpha
* \f]
*
* where \mu is the mean and K is the prior covariance matrix
* where \f$\mu\f$ is the mean and \f$K\f$ is the prior covariance matrix
*/
virtual SGVector<float64_t> get_alpha();

Expand Down
4 changes: 2 additions & 2 deletions src/shogun/regression/gp/GaussianLikelihood.h
Expand Up @@ -53,14 +53,14 @@ class CGaussianLikelihood: public CLikelihoodModel

/** Evaluate means
*
* @param Vector of means calculated by inference method
* @param means means calculated by inference method
* @return Final means evaluated by likelihood function
*/
virtual SGVector<float64_t> evaluate_means(SGVector<float64_t>& means);

/** Evaluate variances
*
* @param Vector of variances calculated by inference method
* @param vars variances calculated by inference method
* @return Final variances evaluated by likelihood function
*/
virtual SGVector<float64_t> evaluate_variances(SGVector<float64_t>& vars);
Expand Down
14 changes: 7 additions & 7 deletions src/shogun/regression/gp/InferenceMethod.h
Expand Up @@ -81,7 +81,7 @@ class CInferenceMethod : public CDifferentiableFunction
* \mu = K\alpha
* \f]
*
* where \mu is the mean and K is the prior covariance matrix
* where \f$\mu\f$ is the mean and \f$K\f$ is the prior covariance matrix
*/
virtual SGVector<float64_t> get_alpha() = 0;

Expand All @@ -92,8 +92,8 @@ class CInferenceMethod : public CDifferentiableFunction
* L = Cholesky(sW*K*sW+I)
* \f]
*
* Where K is the prior covariance matrix, sW is the matrix returned by
* get_cholesky(), and I is the identity matrix.
* Where \f$K\f$ is the prior covariance matrix, sW is the matrix returned by
* get_cholesky(), and \f$I\f$ is the identity matrix.
*/
virtual SGMatrix<float64_t> get_cholesky() = 0;

Expand Down Expand Up @@ -137,15 +137,15 @@ class CInferenceMethod : public CDifferentiableFunction
*/
virtual void set_kernel(CKernel* kern);

/**get kernel
/**get mean
*
* @return kernel
* @return mean
*/
virtual CMeanFunction* get_mean() { SG_REF(m_mean); return m_mean; }

/**set kernel
/**set mean
*
* @param kern kernel to set
* @param m mean to set
*/
virtual void set_mean(CMeanFunction* m);

Expand Down
6 changes: 3 additions & 3 deletions src/shogun/regression/gp/LikelihoodModel.h
Expand Up @@ -43,21 +43,21 @@ class CLikelihoodModel : public CSGObject

/** get likelihood function derivative with respect to parameters
*
* @param name of parameter used to take derivative
* @param param_name of parameter used to take derivative
* @return likelihood derivative with respect to parameter
*/
virtual float64_t get_parameter_derivative(const char* param_name);

/** Evaluate means
*
* @param Vector of means calculated by inference method
* @param means means calculated by inference method
* @return Final means evaluated by likelihood function
*/
virtual SGVector<float64_t> evaluate_means(SGVector<float64_t>& means) = 0;

/** Evaluate variances
*
* @param Vector of variances calculated by inference method
* @param vars variances calculated by inference method
* @return Final variances evaluated by likelihood function
*/
virtual SGVector<float64_t> evaluate_variances(SGVector<float64_t>& vars) = 0;
Expand Down

0 comments on commit d87c378

Please sign in to comment.