Skip to content

Commit

Permalink
Merged apply() and apply(CFeatures* data) into apply(CFeatures* data=…
Browse files Browse the repository at this point in the history
…NULL)
  • Loading branch information
lisitsyn committed May 21, 2012
1 parent ecc853c commit ad7e86b
Show file tree
Hide file tree
Showing 28 changed files with 155 additions and 311 deletions.
20 changes: 6 additions & 14 deletions src/shogun/classifier/GaussianNaiveBayes.cpp
Expand Up @@ -173,8 +173,13 @@ bool CGaussianNaiveBayes::train(CFeatures* data)
return true;
}

CLabels* CGaussianNaiveBayes::apply()
CLabels* CGaussianNaiveBayes::apply(CFeatures* data)
{
if (data)
set_features(data);

ASSERT(m_features);

// init number of vectors
int32_t num_vectors = m_features->get_num_vectors();

Expand All @@ -192,19 +197,6 @@ CLabels* CGaussianNaiveBayes::apply()
return result;
};

/** Classify the given examples with Gaussian naive Bayes.
 *
 * @param data features to classify; must be non-NULL
 * @return labels produced by the no-argument apply() overload
 */
CLabels* CGaussianNaiveBayes::apply(CFeatures* data)
{
	// check data correctness
	if (!data)
		SG_ERROR("No features specified\n");

	// set features to classify
	set_features(data);

	// classify using features
	return apply();
} // note: removed stray ';' after the function body (ill-formed pre-C++11)

float64_t CGaussianNaiveBayes::apply(int32_t idx)
{
// get [idx] feature vector
Expand Down
9 changes: 2 additions & 7 deletions src/shogun/classifier/GaussianNaiveBayes.h
Expand Up @@ -66,18 +66,13 @@ class CGaussianNaiveBayes : public CMachine
* @param data train examples
* @return true if successful
*/
virtual bool train(CFeatures* data = NULL);

/** classify all examples
* @return labels
*/
virtual CLabels* apply();
virtual bool train(CFeatures* data=NULL);

/** classify specified examples
* @param data examples to be classified
* @return labels corresponding to data
*/
virtual CLabels* apply(CFeatures* data);
virtual CLabels* apply(CFeatures* data=NULL);

/** classify specified example
* @param idx example index
Expand Down
28 changes: 12 additions & 16 deletions src/shogun/classifier/PluginEstimate.cpp
Expand Up @@ -96,8 +96,19 @@ bool CPluginEstimate::train_machine(CFeatures* data)
return true;
}

CLabels* CPluginEstimate::apply()
CLabels* CPluginEstimate::apply(CFeatures* data)
{
if (data)
{
if (data->get_feature_class() != C_STRING ||
data->get_feature_type() != F_WORD)
{
SG_ERROR("Features not of class string type word\n");
}

set_features((CStringFeatures<uint16_t>*) data);
}

ASSERT(features);
CRealLabels* result=new CRealLabels(features->get_num_vectors());
ASSERT(result->get_num_labels()==features->get_num_vectors());
Expand All @@ -108,21 +119,6 @@ CLabels* CPluginEstimate::apply()
return result;
}

/** Classify the given examples with the plugin estimator.
 *
 * @param data word-type string features to classify; must be non-NULL
 * @return labels produced by the no-argument apply() overload
 */
CLabels* CPluginEstimate::apply(CFeatures* data)
{
	// refuse to classify without any input features
	if (!data)
		SG_ERROR("No features specified\n");

	// this estimator supports word-type string features only
	bool valid = data->get_feature_class() == C_STRING &&
			data->get_feature_type() == F_WORD;
	if (!valid)
		SG_ERROR("Features not of class string type word\n");

	set_features((CStringFeatures<uint16_t>*) data);

	// delegate the actual classification to the parameterless overload
	return apply();
}

float64_t CPluginEstimate::apply(int32_t vec_idx)
{
ASSERT(features);
Expand Down
8 changes: 1 addition & 7 deletions src/shogun/classifier/PluginEstimate.h
Expand Up @@ -41,18 +41,12 @@ class CPluginEstimate: public CMachine
CPluginEstimate(float64_t pos_pseudo=1e-10, float64_t neg_pseudo=1e-10);
virtual ~CPluginEstimate();

/** classify objects using the currently set features
*
* @return classified labels
*/
CLabels* apply();

/** classify objects
*
* @param data (test)data to be classified
* @return classified labels
*/
virtual CLabels* apply(CFeatures* data);
virtual CLabels* apply(CFeatures* data=NULL);

/** set features
*
Expand Down
21 changes: 9 additions & 12 deletions src/shogun/classifier/QDA.cpp
Expand Up @@ -68,8 +68,16 @@ void CQDA::cleanup()
m_num_classes = 0;
}

CLabels* CQDA::apply()
CLabels* CQDA::apply(CFeatures* data)
{
if (data)
{
if (!data->has_property(FP_DOT))
SG_ERROR("Specified features are not of type CDotFeatures\n");

set_features((CDotFeatures*) data);
}

if ( !m_features )
return NULL;

Expand Down Expand Up @@ -135,17 +143,6 @@ CLabels* CQDA::apply()
return out;
}

/** Apply QDA to the given examples.
 *
 * @param data dot-product capable features to classify; must be non-NULL
 * @return labels produced by the no-argument apply() overload
 */
CLabels* CQDA::apply(CFeatures* data)
{
	// a NULL pointer means the caller supplied no test data
	if (!data)
		SG_ERROR("No features specified\n");

	// QDA operates on dot-product capable features only
	if (!data->has_property(FP_DOT))
		SG_ERROR("Specified features are not of type CDotFeatures\n");

	set_features((CDotFeatures*) data);

	// run classification on the freshly set features
	return apply();
}

bool CQDA::train_machine(CFeatures* data)
{
if ( !m_labels )
Expand Down
8 changes: 1 addition & 7 deletions src/shogun/classifier/QDA.h
Expand Up @@ -54,18 +54,12 @@ class CQDA : public CMachine

virtual ~CQDA();

/** apply QDA to all examples
*
* @return resulting labels
*/
virtual CLabels* apply();

/** apply QDA to data
*
* @param data (test) data to be classified
* @return labels result of classification
*/
virtual CLabels* apply(CFeatures* data);
virtual CLabels* apply(CFeatures* data=NULL);

/** set store_covs
*
Expand Down
29 changes: 13 additions & 16 deletions src/shogun/classifier/svm/WDSVMOcas.cpp
Expand Up @@ -98,8 +98,20 @@ CWDSVMOcas::~CWDSVMOcas()
{
}

CLabels* CWDSVMOcas::apply()
CLabels* CWDSVMOcas::apply(CFeatures* data)
{
if (data)
{
if (data->get_feature_class() != C_STRING ||
data->get_feature_type() != F_BYTE)
{
SG_ERROR("Features not of class string type byte\n");
}

set_features((CStringFeatures<uint8_t>*) data);
}
ASSERT(features);

set_wd_weights();
set_normalization_const();

Expand All @@ -120,21 +132,6 @@ CLabels* CWDSVMOcas::apply()
return NULL;
}

/** Classify the given examples with WD-SVM Ocas.
 *
 * @param data byte-type string features to classify; must be non-NULL
 * @return labels produced by the no-argument apply() overload
 */
CLabels* CWDSVMOcas::apply(CFeatures* data)
{
	// bail out when no test data was handed in
	if (!data)
		SG_ERROR("No features specified\n");

	// WD-SVM Ocas works exclusively on byte string features
	bool byte_strings = data->get_feature_class() == C_STRING &&
			data->get_feature_type() == F_BYTE;
	if (!byte_strings)
		SG_ERROR("Features not of class string type byte\n");

	set_features((CStringFeatures<uint8_t>*) data);

	// classification itself happens in the no-argument overload
	return apply();
}

int32_t CWDSVMOcas::set_wd_weights()
{
ASSERT(degree>0 && degree<=8);
Expand Down
8 changes: 1 addition & 7 deletions src/shogun/classifier/svm/WDSVMOcas.h
Expand Up @@ -148,18 +148,12 @@ class CWDSVMOcas : public CMachine
*/
inline int32_t get_degree() { return degree; }

/** classify all examples
*
* @return resulting labels
*/
CLabels* apply();

/** classify objects
*
* @param data (test)data to be classified
* @return classified labels
*/
virtual CLabels* apply(CFeatures* data);
virtual CLabels* apply(CFeatures* data=NULL);

/** classify one example
*
Expand Down
5 changes: 0 additions & 5 deletions src/shogun/clustering/Hierarchical.cpp
Expand Up @@ -197,11 +197,6 @@ void CHierarchical::store_model_features()
}

// NOTE(review): 'data' is intentionally ignored; this just delegates to
// apply(), which for CHierarchical only raises an "apply(...) not
// implemented" error (hierarchical clustering has no apply step).
CLabels* CHierarchical::apply(CFeatures* data)
{
return apply();
}

CLabels* CHierarchical::apply()
{
SG_ERROR("apply(...) not implemented for %s!\n", get_name());
return NULL;
Expand Down
5 changes: 1 addition & 4 deletions src/shogun/clustering/Hierarchical.h
Expand Up @@ -119,10 +119,7 @@ class CHierarchical : public CDistanceMachine
virtual void store_model_features();

/** NOT IMPLEMENTED */
virtual CLabels* apply();

/** NOT IMPLEMENTED */
virtual CLabels* apply(CFeatures* data);
virtual CLabels* apply(CFeatures* data=NULL);

/** NOT IMPLEMENTED */
virtual float64_t apply(int32_t num);
Expand Down
18 changes: 0 additions & 18 deletions src/shogun/lib/external/shogun_liblinear.cpp
Expand Up @@ -642,24 +642,6 @@ void Solver_MCSVM_CS::solve()
if (iter >= max_iter)
SG_SINFO("Warning: reaching max number of iterations\n");

// calculate objective value
/*
double v = 0;
int nSV = 0;
for(i=0;i<w_size*nr_class;i++)
v += w[i]*w[i];
v = 0.5*v;
for(i=0;i<l*nr_class;i++)
{
v += alpha[i];
if(fabs(alpha[i]) > 0)
nSV++;
}
for(i=0;i<l;i++)
v -= alpha[i*nr_class+prob->y[i]];
SG_SINFO("Objective value = %f\n",v);
SG_SINFO("nSV = %d\n",nSV);
*/
SG_FREE(tx);
}

Expand Down
44 changes: 22 additions & 22 deletions src/shogun/machine/DistanceMachine.cpp
Expand Up @@ -206,28 +206,28 @@ void* CDistanceMachine::run_distance_thread_rhs(void* p)

CLabels* CDistanceMachine::apply(CFeatures* data)
{
ASSERT(data);

/* set distance features to given ones and apply to all */
CFeatures* lhs=distance->get_lhs();
distance->init(lhs, data);
SG_UNREF(lhs);

/* build result labels and classify all elements of procedure */
CRealLabels* result=new CRealLabels(data->get_num_vectors());
for (index_t i=0; i<data->get_num_vectors(); ++i)
result->set_label(i, apply(i));

return result;
}

CLabels* CDistanceMachine::apply()
{
/* call apply on complete right hand side */
CFeatures* all=distance->get_rhs();
CLabels* result=apply(all);
SG_UNREF(all);
return result;
if (data)
{
/* set distance features to given ones and apply to all */
CFeatures* lhs=distance->get_lhs();
distance->init(lhs, data);
SG_UNREF(lhs);

/* build result labels and classify all elements of procedure */
CRealLabels* result=new CRealLabels(data->get_num_vectors());
for (index_t i=0; i<data->get_num_vectors(); ++i)
result->set_label(i, apply(i));
return result;
}
else
{
/* call apply on complete right hand side */
CFeatures* all=distance->get_rhs();
CLabels* result=apply(all);
SG_UNREF(all);
return result;
}
return NULL;
}

float64_t CDistanceMachine::apply(int32_t num)
Expand Down
10 changes: 1 addition & 9 deletions src/shogun/machine/DistanceMachine.h
Expand Up @@ -84,22 +84,14 @@ class CDistanceMachine : public CMachine
*/
virtual const char* get_name() const { return "DistanceMachine"; }

/** Classify all rhs features using the built clusters.
* Cluster index with smallest distance to the element to be classified is
* returned
*
* @return classified labels
*/
virtual CLabels* apply();

/** Classify all provided features.
* Cluster index with smallest distance to the element to be classified is
* returned
*
* @param data (test)data to be classified
* @return classified labels
*/
virtual CLabels* apply(CFeatures* data);
virtual CLabels* apply(CFeatures* data=NULL);

/** Apply machine to one example.
* Cluster index with smallest distance to the element to be classified is
Expand Down

0 comments on commit ad7e86b

Please sign in to comment.