various fixes for libshogun examples
Soeren Sonnenburg committed May 21, 2012
1 parent e9382f3 commit 4830bda
Showing 16 changed files with 53 additions and 52 deletions.
4 changes: 2 additions & 2 deletions examples/undocumented/libshogun/classifier_conjugateindex.cpp
@@ -1,4 +1,4 @@
-#include <shogun/features/Labels.h>
+#include <shogun/labels/MulticlassLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/multiclass/ConjugateIndex.h>
#include <shogun/base/init.h>
@@ -21,7 +21,7 @@ int main(int argc, char** argv)
CDenseFeatures<float64_t>* features= new CDenseFeatures<float64_t>(matrix);

// create three labels
-CLabels* labels=new CLabels(3);
+CMulticlassLabels* labels=new CMulticlassLabels(3);
labels->set_label(0, 0);
labels->set_label(1, +1);
labels->set_label(2, 0);
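The hunks above show the pattern this commit applies throughout the examples: the old generic CLabels container is replaced by a concrete label type from the new shogun/labels/ headers (CMulticlassLabels here, CBinaryLabels in the two-class examples), and per-example classification goes through apply_one(). A minimal standalone sketch of the multiclass label pattern, assuming the 2012-era libshogun API used in these examples (init_shogun_with_defaults(), SG_SPRINT and SG_UNREF as in the other files):

```cpp
#include <shogun/base/init.h>
#include <shogun/io/SGIO.h>
#include <shogun/labels/MulticlassLabels.h>

using namespace shogun;

int main()
{
	init_shogun_with_defaults();

	// concrete CMulticlassLabels replaces the old generic CLabels(3)
	CMulticlassLabels* labels=new CMulticlassLabels(3);
	labels->set_label(0, 0);
	labels->set_label(1, 1);
	labels->set_label(2, 2);

	// labels are stored as float64_t, hence the %f format
	for (int32_t i=0; i<labels->get_num_labels(); i++)
		SG_SPRINT("label[%d]=%f\n", i, labels->get_label(i));

	SG_UNREF(labels);
	exit_shogun();
	return 0;
}
```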
@@ -1,4 +1,4 @@
-#include <shogun/features/Labels.h>
+#include <shogun/labels/MulticlassLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/classifier/GaussianNaiveBayes.h>
#include <shogun/base/init.h>
@@ -21,7 +21,7 @@ int main(int argc, char** argv)
CDenseFeatures<float64_t>* features= new CDenseFeatures<float64_t>(matrix);

// create three labels
-CLabels* labels=new CLabels(3);
+CMulticlassLabels* labels=new CMulticlassLabels(3);
labels->set_label(0, 0);
labels->set_label(1, +1);
labels->set_label(2, +2);
@@ -31,7 +31,7 @@ int main(int argc, char** argv)

// classify on training examples
for (int32_t i=0; i<3; i++)
SG_SPRINT("output[%d]=%f\n", i, ci->apply(i));
SG_SPRINT("output[%d]=%f\n", i, ci->apply_one(i));

// free up memory
SG_UNREF(ci);
5 changes: 2 additions & 3 deletions examples/undocumented/libshogun/classifier_libsvm.cpp
@@ -9,7 +9,6 @@
*/
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/labels/BinaryLabels.h>
-#include <shogun/labels/RealLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/classifier/svm/LibSVM.h>
#include <shogun/mathematics/Math.h>
@@ -89,10 +88,10 @@ int main()
printf("num_sv:%d b:%f\n", svm->get_num_support_vectors(), svm->get_bias());

// classify + display output
-CRealLabels* out_labels=(CRealLabels*) svm->apply();
+CBinaryLabels* out_labels=(CBinaryLabels*) svm->apply();

for (int32_t i=0; i<NUM; i++)
printf("out[%d]=%f\n", i, out_labels->get_label(i));
printf("out[%d]=%f (%f)\n", i, out_labels->get_label(i), out_labels->get_confidence(i));

SG_UNREF(out_labels);
SG_UNREF(kernel);
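With CRealLabels removed, binary SVM output is now read back as CBinaryLabels, which carries both the thresholded ±1 label and a real-valued confidence per example. A small self-contained sketch of that pattern, modelled on classifier_libsvm.cpp above; the toy data, kernel width and C value are made up for illustration, and the CGaussianKernel/CLibSVM constructors are assumed to match the API these examples use:

```cpp
#include <shogun/base/init.h>
#include <shogun/io/SGIO.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/labels/BinaryLabels.h>
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/classifier/svm/LibSVM.h>

using namespace shogun;

int main()
{
	init_shogun_with_defaults();

	// 2-dimensional toy data, one column per example, two separated groups
	SGMatrix<float64_t> matrix(2, 4);
	for (int32_t i=0; i<2*4; i++)
		matrix.matrix[i]=(i<4) ? -1.0-i : 1.0+i;

	CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t>(matrix);

	// +/-1 training labels in the concrete CBinaryLabels type
	CBinaryLabels* labels=new CBinaryLabels(4);
	labels->set_label(0, -1);
	labels->set_label(1, -1);
	labels->set_label(2, +1);
	labels->set_label(3, +1);

	// Gaussian kernel (cache size 10, width 2.0) and a LibSVM with C=1
	CGaussianKernel* kernel=new CGaussianKernel(10, 2.0);
	kernel->init(features, features);
	CLibSVM* svm=new CLibSVM(1.0, kernel, labels);
	svm->train();

	// apply() returns the base CLabels*; for a binary machine it holds
	// both the sign and the confidence of each prediction
	CBinaryLabels* out=(CBinaryLabels*) svm->apply();
	for (int32_t i=0; i<out->get_num_labels(); i++)
		SG_SPRINT("out[%d]=%f (%f)\n", i, out->get_label(i), out->get_confidence(i));

	SG_UNREF(out);
	SG_UNREF(svm);
	exit_shogun();
	return 0;
}
```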
6 changes: 3 additions & 3 deletions examples/undocumented/libshogun/classifier_minimal_svm.cpp
@@ -1,4 +1,4 @@
-#include <shogun/features/Labels.h>
+#include <shogun/labels/BinaryLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/classifier/svm/LibSVM.h>
@@ -28,7 +28,7 @@ int main(int argc, char** argv)
features->set_feature_matrix(matrix);

// create three labels
-CLabels* labels=new CLabels(3);
+CBinaryLabels* labels=new CBinaryLabels(3);
labels->set_label(0, -1);
labels->set_label(1, +1);
labels->set_label(2, -1);
@@ -43,7 +43,7 @@ int main(int argc, char** argv)

// classify on training examples
for (int32_t i=0; i<3; i++)
SG_SPRINT("output[%d]=%f\n", i, svm->apply(i));
SG_SPRINT("output[%d]=%f\n", i, svm->apply_one(i));

// free up memory
SG_UNREF(svm);
13 changes: 7 additions & 6 deletions examples/undocumented/libshogun/classifier_mklmulticlass.cpp
@@ -10,6 +10,7 @@
#include <iostream>
#include <shogun/io/SGIO.h>
#include <shogun/lib/ShogunException.h>
+#include <shogun/labels/MulticlassLabels.h>
#include <shogun/kernel/CustomKernel.h>
#include <shogun/kernel/CombinedKernel.h>
#include <shogun/classifier/mkl/MKLMulticlass.h>
@@ -51,7 +52,7 @@ void getgauss(float64_t & y1, float64_t & y2)


void gendata(std::vector<float64_t> & x,std::vector<float64_t> & y,
-CLabels*& lab)
+CMulticlassLabels*& lab)
{
int32_t totalsize=240;
int32_t class1size=80;
@@ -83,7 +84,7 @@ void gendata(std::vector<float64_t> & x,std::vector<float64_t> & y,
}

//set labels
-lab=new CLabels(x.size());
+lab=new CMulticlassLabels(x.size());
for(size_t i=0; i< x.size();++i)
{
if((int32_t)i < class1size)
@@ -194,7 +195,7 @@ void gentestkernel(float64_t * & ker1 ,float64_t * & ker2,float64_t * & ker3,

void tester()
{
-CLabels* lab=NULL;
+CMulticlassLabels* lab=NULL;
std::vector<float64_t> x,y;

gendata(x,y, lab);
@@ -242,7 +243,7 @@ void tester()
SG_SPRINT("finished svm training\n");

//starting svm testing on training data
-CLabels* res=tsvm->apply();
+CMulticlassLabels* res=CMulticlassLabels::obtain_from_generic(tsvm->apply());
ASSERT(res);

float64_t err=0;
@@ -261,7 +262,7 @@ void tester()
SG_FREE(ker3);

//generate test data
-CLabels* tlab=NULL;
+CMulticlassLabels* tlab=NULL;

std::vector<float64_t> tx,ty;

@@ -304,7 +305,7 @@ void tester()
tsvm->set_kernel(tker);

//compute classification error, check mem
-CLabels* tres=tsvm->apply();
+CMulticlassLabels* tres=CMulticlassLabels::obtain_from_generic(tsvm->apply());

float64_t terr=0;
for(int32_t i=0; i<numdatatest;++i)
@@ -1,4 +1,4 @@
-#include <shogun/features/Labels.h>
+#include <shogun/labels/MulticlassLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/multiclass/MulticlassLibSVM.h>
@@ -27,7 +27,7 @@ int main(int argc, char** argv)
CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t>(matrix);

// create three labels
-CLabels* labels=new CLabels(num_vec);
+CMulticlassLabels* labels=new CMulticlassLabels(num_vec);
for (index_t i=0; i<num_vec; ++i)
labels->set_label(i, i%num_class);

@@ -40,14 +40,14 @@ int main(int argc, char** argv)
svm->train();

// classify on training examples
-CLabels* output=svm->apply();
+CMulticlassLabels* output=CMulticlassLabels::obtain_from_generic(svm->apply());
CMath::display_vector(output->get_labels().vector, output->get_num_labels(),
"batch output");

/* assert that batch apply and apply(index_t) give same result */
for (index_t i=0; i<output->get_num_labels(); ++i)
{
-float64_t label=svm->apply(i);
+float64_t label=svm->apply_one(i);
SG_SPRINT("single output[%d]=%f\n", i, label);
ASSERT(output->get_label(i)==label);
}
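Since apply() still returns the base CLabels*, the multiclass examples convert the batch result with CMulticlassLabels::obtain_from_generic() and then cross-check it against per-example apply_one() calls. A hedged sketch of that consistency check as a helper function (the function name is made up; the machine is assumed to be already trained as in the example above):

```cpp
#include <shogun/io/SGIO.h>
#include <shogun/labels/MulticlassLabels.h>
#include <shogun/multiclass/MulticlassLibSVM.h>

using namespace shogun;

// cross-check batch apply() against per-example apply_one() for a machine
// that was already trained as in the example above (helper name is made up)
void check_batch_vs_single(CMulticlassLibSVM* svm)
{
	// apply() returns the base CLabels*; obtain_from_generic() recovers
	// the concrete CMulticlassLabels
	CMulticlassLabels* output=
		CMulticlassLabels::obtain_from_generic(svm->apply());

	for (index_t i=0; i<output->get_num_labels(); ++i)
	{
		float64_t label=svm->apply_one(i);
		SG_SPRINT("single output[%d]=%f\n", i, label);
		ASSERT(output->get_label(i)==label);
	}

	SG_UNREF(output);
}
```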
@@ -1,4 +1,4 @@
-#include <shogun/features/Labels.h>
+#include <shogun/labels/MulticlassLabels.h>
#include <shogun/io/StreamingAsciiFile.h>
#include <shogun/io/SGIO.h>
#include <shogun/features/StreamingDenseFeatures.h>
@@ -58,7 +58,7 @@ int main(int argc, char** argv)
// Create features with the useful values from mat
CDenseFeatures< float64_t >* features = new CDenseFeatures<float64_t>(mat);

-CLabels* labels = new CLabels(num_vectors);
+CMulticlassLabels* labels = new CMulticlassLabels(num_vectors);
SG_REF(features);
SG_REF(labels);

@@ -89,7 +89,7 @@ int main(int argc, char** argv)
mc_svm->train();

// Classify the training examples and show the results
-CLabels* output = mc_svm->apply();
+CMulticlassLabels* output = mc_svm->apply();

SGVector< int32_t > out_labels = output->get_int_labels();
CMath::display_vector(out_labels.vector, out_labels.vlen);
5 changes: 3 additions & 2 deletions examples/undocumented/libshogun/classifier_qda.cpp
@@ -9,6 +9,7 @@
*/

#include <shogun/base/init.h>
+#include <shogun/labels/MulticlassLabels.h>
#include <shogun/classifier/QDA.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/io/SGIO.h>
@@ -52,7 +53,7 @@ int main(int argc, char ** argv)
gen_rand_data(lab, feat);

// Create train labels
-CLabels* labels = new CLabels(lab);
+CMulticlassLabels* labels = new CMulticlassLabels(lab);

// Create train features
CDenseFeatures< float64_t >* features = new CDenseFeatures< float64_t >(feat);
@@ -63,7 +64,7 @@ int main(int argc, char ** argv)
qda->train();

// Classify and display output
-CLabels* out_labels = qda->apply();
+CMulticlassLabels* out_labels = CMulticlassLabels::obtain_from_generic(qda->apply());
SG_REF(out_labels);

// Free memory
8 changes: 4 additions & 4 deletions examples/undocumented/libshogun/clustering_kmeans.cpp
@@ -15,7 +15,7 @@
#include <shogun/modelselection/GridSearchModelSelection.h>
#include <shogun/modelselection/ModelSelectionParameters.h>
#include <shogun/modelselection/ParameterCombination.h>
-#include <shogun/features/Labels.h>
+#include <shogun/labels/MulticlassLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/clustering/KMeans.h>
#include <shogun/distance/EuclidianDistance.h>
@@ -70,9 +70,9 @@ int main(int argc, char **argv)
SG_REF(features);

/* create labels for cluster centers */
-CLabels* labels=new CLabels(num_features);
+CMulticlassLabels* labels=new CMulticlassLabels(num_features);
for (index_t i=0; i<num_features; ++i)
-labels->set_label(i, i%2==0 ? 1 : -1);
+labels->set_label(i, i%2==0 ? 0 : 1);

/* create distance */
CEuclidianDistance* distance=new CEuclidianDistance(features, features);
@@ -82,7 +82,7 @@ int main(int argc, char **argv)
clustering->train(features);

/* build clusters */
-CLabels* result=clustering->apply();
+CMulticlassLabels* result=CMulticlassLabels::obtain_from_generic(clustering->apply());
for (index_t i=0; i<result->get_num_labels(); ++i)
SG_SPRINT("cluster index of vector %i: %f\n", i, result->get_label(i));

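KMeans follows the same pattern: cluster assignments come back as a CMulticlassLabels whose entries are now the 0-based cluster indices rather than ±1. A sketch of reading them out, modelled on clustering_kmeans.cpp above; the toy data is a placeholder and the CKMeans(k, distance) constructor is assumed to be the one used in that example:

```cpp
#include <shogun/base/init.h>
#include <shogun/io/SGIO.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/labels/MulticlassLabels.h>
#include <shogun/clustering/KMeans.h>
#include <shogun/distance/EuclidianDistance.h>

using namespace shogun;

int main()
{
	init_shogun_with_defaults();

	// toy data: 8 points in 2 dimensions forming two well separated blobs
	SGMatrix<float64_t> data(2, 8);
	for (int32_t i=0; i<2*8; i++)
		data.matrix[i]=(i<8) ? 0.1*i : 10.0+0.1*i;

	CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t>(data);
	SG_REF(features);

	// Euclidean distance on the data, k=2 clusters
	CEuclidianDistance* distance=new CEuclidianDistance(features, features);
	CKMeans* clustering=new CKMeans(2, distance);
	clustering->train(features);

	// cluster assignments come back as 0-based CMulticlassLabels
	CMulticlassLabels* result=
		CMulticlassLabels::obtain_from_generic(clustering->apply());
	for (index_t i=0; i<result->get_num_labels(); ++i)
		SG_SPRINT("cluster index of vector %i: %f\n", i, result->get_label(i));

	SG_UNREF(result);
	SG_UNREF(clustering);
	SG_UNREF(features);
	exit_shogun();
	return 0;
}
```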
12 changes: 6 additions & 6 deletions examples/undocumented/libshogun/kernel_machine_train_locked.cpp
@@ -10,7 +10,7 @@

#include <shogun/base/init.h>
#include <shogun/features/DenseFeatures.h>
-#include <shogun/features/Labels.h>
+#include <shogun/labels/BinaryLabels.h>
#include <shogun/kernel/LinearKernel.h>
#include <shogun/classifier/svm/LibSVM.h>
#include <shogun/evaluation/ContingencyTableEvaluation.h>
@@ -63,7 +63,7 @@ void test()

CMath::display_vector(lab.vector, lab.vlen, "training labels");

-CLabels* labels=new CLabels(lab);
+CBinaryLabels* labels=new CBinaryLabels(lab);
SG_REF(labels);

/* evaluation instance */
@@ -91,7 +91,7 @@ void test()
indices.vector[3]=4;
CMath::display_vector(indices.vector, indices.vlen, "training indices");
svm->train_locked(indices);
-CLabels* output=svm->apply();
+CBinaryLabels* output=CBinaryLabels::obtain_from_generic(svm->apply());
ASSERT(eval->evaluate(output, labels)==1);
CMath::display_vector(output->get_labels().vector, output->get_num_labels(), "apply() output");
SG_UNREF(output);
@@ -102,7 +102,7 @@ void test()
indices.vector[1]=2;
indices.vector[2]=3;
CMath::display_vector(indices.vector, indices.vlen, "training indices");
-output=svm->apply();
+output=CBinaryLabels::obtain_from_generic(svm->apply());
ASSERT(eval->evaluate(output, labels)==1);
CMath::display_vector(output->get_labels().vector, output->get_num_labels(), "apply() output");
SG_UNREF(output);
@@ -112,15 +112,15 @@ void test()
indices.range_fill();
CMath::display_vector(indices.vector, indices.vlen, "training indices");
svm->train_locked(indices);
-output=svm->apply();
+output=CBinaryLabels::obtain_from_generic(svm->apply());
ASSERT(eval->evaluate(output, labels)==1);
CMath::display_vector(output->get_labels().vector, output->get_num_labels(), "apply() output");
SG_UNREF(output);

SG_SPRINT("normal train\n");
svm->data_unlock();
svm->train();
-output=svm->apply();
+output=CBinaryLabels::obtain_from_generic(svm->apply());
ASSERT(eval->evaluate(output, labels)==1);
CMath::display_vector(output->get_labels().vector, output->get_num_labels(), "output");
SG_UNREF(output);
@@ -12,7 +12,7 @@
#include <shogun/modelselection/ModelSelectionParameters.h>
#include <shogun/modelselection/ParameterCombination.h>
#include <shogun/kernel/GaussianKernel.h>
-#include <shogun/features/Labels.h>
+#include <shogun/labels/BinaryLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/classifier/svm/LibSVM.h>

@@ -63,7 +63,7 @@ void apply_parameter_tree(CDynamicObjectArray* combinations)
SG_REF(features);

/* create three labels, will be handed to svm and automaticall deleted */
-CLabels* labels=new CLabels(3);
+CBinaryLabels* labels=new CBinaryLabels(3);
SG_REF(labels);
labels->set_label(0, -1);
labels->set_label(1, +1);
@@ -92,7 +92,7 @@ void apply_parameter_tree(CDynamicObjectArray* combinations)

/* classify on training examples */
for (index_t i=0; i<3; i++)
SG_SPRINT("output[%d]=%f\n", i, svm->apply(i));
SG_SPRINT("output[%d]=%f\n", i, svm->apply_one(i));

/* unset features and SG_UNREF kernel */
kernel->cleanup();
6 changes: 3 additions & 3 deletions examples/undocumented/libshogun/parameter_iterate_float64.cpp
@@ -8,7 +8,7 @@
* Copyright (C) 2011 Berlin Institute of Technology and Max-Planck-Society
*/

-#include <shogun/features/Labels.h>
+#include <shogun/labels/BinaryLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/classifier/svm/LibSVM.h>
@@ -46,7 +46,7 @@ int main(int argc, char** argv)
kernel->init(features, features);

/* create n labels (+1,-1,+1,-1,...) */
-CLabels* labels=new CLabels(n);
+CBinaryLabels* labels=new CBinaryLabels(n);
for (int32_t i=0; i<n; ++i)
labels->set_label(i, i%2==0 ? +1 : -1);

@@ -80,7 +80,7 @@ int main(int argc, char** argv)
svm->train();
for (int32_t i=0; i<n; ++i)
SG_SPRINT("output[%d]=%f\treal[%d]=%f\n", i,
-svm->apply(i), i, labels->get_label(i));
+svm->apply_one(i), i, labels->get_label(i));

delete param;
}
@@ -8,7 +8,7 @@
* Copyright (C) 2011 Berlin Institute of Technology and Max-Planck-Society
*/

-#include <shogun/features/Labels.h>
+#include <shogun/labels/BinaryLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/classifier/svm/LibSVM.h>
@@ -59,7 +59,7 @@ int main(int argc, char** argv)
}

/* create n labels (+1,-1,+1,-1,...) */
-CLabels* labels=new CLabels(n);
+CBinaryLabels* labels=new CBinaryLabels(n);
for (int32_t i=0; i<n; ++i)
labels->set_label(i, i%2==0 ? +1 : -1);

@@ -78,7 +78,7 @@ int main(int argc, char** argv)
svm->train();
for (int32_t i=0; i<n; ++i)
SG_SPRINT("output[%d]=%f\treal[%d]=%f\n", i,
-svm->apply(i), i, labels->get_label(i));
+svm->apply_one(i), i, labels->get_label(i));
}

/* free up memory: delete all Parameter instances */
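The parameter_iterate examples wrap the same classify loop: retrain the SVM for each kernel parameter value and print apply_one() next to the true label. A condensed, hedged sketch of that inner loop as a helper (the function name and the use of set_width() in place of the elided Parameter machinery are illustrative assumptions):

```cpp
#include <shogun/io/SGIO.h>
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/classifier/svm/LibSVM.h>
#include <shogun/labels/BinaryLabels.h>

using namespace shogun;

// retrain for a few kernel widths and print apply_one() next to the truth;
// svm, kernel and labels are assumed wired up as in the example above
void sweep_widths(CLibSVM* svm, CGaussianKernel* kernel, CBinaryLabels* labels)
{
	float64_t widths[]={0.1, 1.0, 10.0};
	for (int32_t w=0; w<3; ++w)
	{
		// set_width() stands in for the Parameter-based update elided above
		kernel->set_width(widths[w]);
		svm->train();

		for (int32_t i=0; i<labels->get_num_labels(); ++i)
			SG_SPRINT("width=%f\toutput[%d]=%f\treal[%d]=%f\n", widths[w], i,
					svm->apply_one(i), i, labels->get_label(i));
	}
}
```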
