
Commit

fixed a few problems with the new label architecture in CPLEX dependent classes
cwidmer committed Jun 14, 2012
1 parent 09518d6 commit 33a2582
Showing 3 changed files with 14 additions and 11 deletions.
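
All three files apply the same pattern: the machines store a generic CLabels* in m_labels, so CPLEX-dependent code now checks the label type and downcasts to CBinaryLabels before reading the +/-1 labels. A minimal sketch of that pattern, using only calls that appear in the diff plus an assumed get_num_labels() loop bound:

#include <shogun/labels/BinaryLabels.h>

// sketch only: m_labels is the generic CLabels* member held by these machines
ASSERT(m_labels);
ASSERT(m_labels->get_label_type() == LT_BINARY);

CBinaryLabels* binary_labels = (CBinaryLabels*) m_labels;
for (int32_t i = 0; i < binary_labels->get_num_labels(); i++)
{
	int32_t y = binary_labels->get_int_label(i); // +1 or -1
	// ... feed y into the CPLEX problem setup ...
}
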
8 changes: 4 additions & 4 deletions src/shogun/classifier/LPM.cpp
@@ -14,6 +14,7 @@

#include <shogun/classifier/LPM.h>
#include <shogun/labels/Labels.h>
+#include <shogun/labels/BinaryLabels.h>
#include <shogun/mathematics/Math.h>
#include <shogun/mathematics/Cplex.h>

@@ -44,9 +45,8 @@ bool CLPM::train_machine(CFeatures* data)
int32_t num_vec=features->get_num_vectors();

ASSERT(num_vec==num_train_labels);
-SG_FREE(w);
-w=SG_MALLOC(float64_t, num_feat);
-w_dim=num_feat;
+
+w = SGVector<float64_t>(num_feat);

int32_t num_params=1+2*num_feat+num_vec; //b,w+,w-,xi
float64_t* params=SG_MALLOC(float64_t, num_params);
@@ -55,7 +55,7 @@ bool CLPM::train_machine(CFeatures* data)
CCplex solver;
solver.init(E_LINEAR);
SG_INFO("C=%f\n", C1);
-solver.setup_lpm(C1, (CSparseFeatures<float64_t>*) features, m_labels, get_bias_enabled());
+solver.setup_lpm(C1, (CSparseFeatures<float64_t>*) features, (CBinaryLabels*)m_labels, get_bias_enabled());
if (get_max_train_time()>0)
solver.set_time_limit(get_max_train_time());
bool result=solver.optimize(params);
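Note on the w change above: the raw float64_t* buffer with separate w_dim bookkeeping is replaced by an SGVector<float64_t>, which carries its own length and releases its memory when the last reference to it goes away. A hedged before/after sketch (SG_FREE/SG_MALLOC and w_dim come from the deleted lines; w.vlen is SGVector's length field):

// before: manual allocation plus explicit size tracking
SG_FREE(w);
w = SG_MALLOC(float64_t, num_feat);
w_dim = num_feat;

// after: the vector owns its buffer and knows its length
w = SGVector<float64_t>(num_feat);
// w.vlen now plays the role of the old w_dim
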
9 changes: 5 additions & 4 deletions src/shogun/classifier/SubGradientLPM.cpp
@@ -222,17 +222,18 @@ void CSubGradientLPM::update_active(int32_t num_feat, int32_t num_vec)
{
for (int32_t i=0; i<num_vec; i++)
{
+int32_t lab = ((CBinaryLabels*) m_labels)->get_int_label(i);
if (active[i]==1 && old_active[i]!=1)
{
-features->add_to_dense_vec(C1*get_label(i), i, sum_CXy_active, num_feat);
+features->add_to_dense_vec(C1*lab, i, sum_CXy_active, num_feat);
if (use_bias)
-sum_Cy_active+=C1*get_label(i);
+sum_Cy_active+=C1*lab;
}
else if (old_active[i]==1 && active[i]!=1)
{
-features->add_to_dense_vec(-C1*get_label(i), i, sum_CXy_active, num_feat);
+features->add_to_dense_vec(-C1*lab, i, sum_CXy_active, num_feat);
if (use_bias)
-sum_Cy_active-=C1*get_label(i);
+sum_Cy_active-=C1*lab;
}
}

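The net effect in update_active() is that the per-example label is fetched once through the CBinaryLabels interface instead of via four separate get_label(i) calls. A sketch of the resulting loop body, with the bookkeeping commented (all names are taken from the diff):

for (int32_t i=0; i<num_vec; i++)
{
	// fetch the +/-1 label of example i once per iteration
	int32_t lab = ((CBinaryLabels*) m_labels)->get_int_label(i);

	if (active[i]==1 && old_active[i]!=1)
	{
		// example became active: add C1*y_i*x_i (and C1*y_i for the bias) to the running sums
		features->add_to_dense_vec(C1*lab, i, sum_CXy_active, num_feat);
		if (use_bias)
			sum_Cy_active+=C1*lab;
	}
	else if (old_active[i]==1 && active[i]!=1)
	{
		// example became inactive: remove its contribution again
		features->add_to_dense_vec(-C1*lab, i, sum_CXy_active, num_feat);
		if (use_bias)
			sum_Cy_active-=C1*lab;
	}
}
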
8 changes: 5 additions & 3 deletions src/shogun/classifier/svm/CPLEXSVM.cpp
@@ -30,6 +30,9 @@ CCPLEXSVM::~CCPLEXSVM()

bool CCPLEXSVM::train_machine(CFeatures* data)
{
+ASSERT(m_labels);
+ASSERT(m_labels->get_label_type() == LT_BINARY);
+
bool result = false;
CCplex cplex;

@@ -44,7 +47,7 @@ bool CCPLEXSVM::train_machine(CFeatures* data)
{
int32_t n,m;
int32_t num_label=0;
-SGVector<float64_t> y=m_labels->get_labels();
+SGVector<float64_t> y=((CBinaryLabels*)m_labels)->get_labels();
SGMatrix<float64_t> H=kernel->get_kernel_matrix();
m=H.num_rows;
n=H.num_cols;
@@ -72,7 +75,7 @@ bool CCPLEXSVM::train_machine(CFeatures* data)
if (alphas[i]>0)
{
//set_alpha(j, alphas[i]*labels->get_label(i)/etas[1]);
-set_alpha(j, alphas[i]*m_labels->get_label(i));
+set_alpha(j, alphas[i]*((CBinaryLabels*) m_labels)->get_int_label(i));
set_support_vector(j, i);
j++;
}
@@ -84,7 +87,6 @@ bool CCPLEXSVM::train_machine(CFeatures* data)
SG_FREE(alphas);
SG_FREE(lb);
SG_FREE(ub);
-H.destroy_matrix();

result = true;
}
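Two things change in CCPLEXSVM::train_machine(): the labels are validated and downcast before use, and the explicit H.destroy_matrix() call is dropped because SGMatrix releases its buffer on its own. A hedged sketch of the guarded label access around the alpha assignment (the alphas/j bookkeeping and the n bound follow the diff; the binary_labels alias is illustrative):

ASSERT(m_labels);
ASSERT(m_labels->get_label_type() == LT_BINARY); // refuse anything but binary labels

CBinaryLabels* binary_labels = (CBinaryLabels*) m_labels;

// ... CPLEX solve fills alphas[0..n-1] as in the file ...

for (int32_t i=0, j=0; i<n; i++)
{
	if (alphas[i]>0)
	{
		// alpha_j keeps the sign of the +/-1 label of support vector i
		set_alpha(j, alphas[i]*binary_labels->get_int_label(i));
		set_support_vector(j, i);
		j++;
	}
}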
