Fixed least squares multitask L1/Lq regressor and made it fit with SLEP's analogous method
lisitsyn committed Jul 1, 2012
1 parent a1b9e16 commit 6375108
Showing 3 changed files with 14 additions and 28 deletions.
24 changes: 3 additions & 21 deletions src/shogun/lib/slep/slep_mt_lr.cpp
@@ -29,7 +29,6 @@ slep_result_t slep_mt_lr(
double funcp = 0.0, func = 0.0;

int n_tasks = options.n_tasks;
//SG_SPRINT("N tasks = %d \n", n_tasks);

int iter = 1;
bool done = false;
@@ -77,10 +76,10 @@ slep_result_t slep_mt_lr(
double q_bar = 0.0;
if (options.q==1)
q_bar = CMath::ALMOST_INFTY;
-if (options.q>1e-6)
+else if (options.q>1e-6)
q_bar = 1;
-
-q_bar = options.q/(options.q-1);
+else
+q_bar = options.q/(options.q-1);
lambda_max = 0.0;

for (i=0; i<n_feats; i++)
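A sketch of what the corrected branch encodes, assuming the standard Hölder-conjugate convention of SLEP-style L1/Lq solvers: q_bar is the conjugate exponent of q, and lambda_max is the smallest regularization weight for which the all-zero solution is already optimal.

$$\frac{1}{q} + \frac{1}{\bar{q}} = 1 \;\Longrightarrow\; \bar{q} = \frac{q}{q-1}, \qquad \bar{q} \to \infty \ \text{as}\ q \to 1, \qquad \bar{q} \to 1 \ \text{as}\ q \to \infty$$

$$\lambda_{\max} = \max_{1 \le i \le \text{n\_feats}} \left\| \big(\nabla f(0)\big)_{i,\cdot} \right\|_{\bar{q}}$$

where f is the unregularized loss and the q̄-norm runs over the n_tasks entries of feature row i; this is why the conjugate exponent is computed just before the lambda_max loop that follows.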
@@ -155,7 +154,6 @@ slep_result_t slep_mt_lr(
while (!done && iter <= options.max_iter)
{
beta = (alphap-1.0)/alpha;
//SG_SPRINT("beta = %f \n", beta);

for (i=0; i<n_feats*n_tasks; i++)
s[i] = w[i] + beta*wwp[i];
@@ -165,8 +163,6 @@
for (i=0; i<n_vecs; i++)
As[i] = Aw[i] + beta*(Aw[i]-Awp[i]);

//SG_SPRINT("As = %f\n",SGVector<float64_t>::dot(As,As,n_vecs));

double fun_s = 0.0;
for (i=0; i<n_tasks*n_feats; i++)
g[i] = 0.0;
@@ -192,10 +188,6 @@
}
fun_s /= n_vecs;

//SG_SPRINT("fun_s = %f\n", fun_s);

//SG_SPRINT("g = %f\n", SGVector<float64_t>::dot(g,g,n_feats*n_tasks));

for (i=0; i<n_feats*n_tasks; i++)
wp[i] = w[i];

@@ -217,11 +209,6 @@

eppMatrix(w.matrix, v, n_feats, n_tasks, lambda/L, options.q);

//SG_SPRINT("params [%d,%d,%f,%f]\n", n_feats, n_tasks, lambda/L, options.q);

//w.display_matrix();
//SG_SPRINT("w = %f \n", SGVector<float64_t>::dot(w.matrix,w.matrix,n_feats*n_tasks));

// v = x - s
for (i=0; i<n_feats*n_tasks; i++)
v[i] = w[i] - s[i];
@@ -242,9 +229,6 @@
}
fun_x /= n_vecs;

//SG_SPRINT("Aw = %f\n", SGVector<float64_t>::dot(Aw,Aw,n_vecs));
//c.display_vector();

double r_sum = SGVector<float64_t>::dot(v,v,n_feats*n_tasks);
double l_sum = fun_x - fun_s - SGVector<float64_t>::dot(v,g,n_feats*n_tasks);

@@ -255,8 +239,6 @@
}
r_sum /= 2.0;

//SG_SPRINT("sums = [%f, %f, %f]\n", r_sum, l_sum, fun_x);

if (r_sum <= 1e-20)
{
gradient_break = true;
15 changes: 9 additions & 6 deletions src/shogun/lib/slep/slep_mt_lsr.cpp
@@ -28,7 +28,7 @@ SGMatrix<double> slep_mt_lsr(
double lambda, lambda_max, beta;
double funcp = 0.0, func = 0.0;

-int n_tasks = options.n_nodes;
+int n_tasks = options.n_tasks;

int iter = 1;
bool done = false;
@@ -51,17 +51,20 @@
double q_bar = 0.0;
if (options.q==1)
q_bar = CMath::ALMOST_INFTY;
-else if (options.q>1e-6)
+else if (options.q>1e6)
q_bar = 1;
else
q_bar = options.q/(options.q-1);

lambda_max = 0.0;

-for (t=0; t<n_tasks; t++)
+for (i=0; i<n_feats; i++)
{
+double sum = 0.0;
+for (t=0; t<n_tasks; t++)
+sum += CMath::pow(fabs(ATy[t*n_feats+i]),q_bar);
lambda_max =
-CMath::max(lambda_max,
-SGVector<float64_t>::qnorm(ATy+t*n_feats, n_feats, q_bar));
+CMath::max(lambda_max, CMath::pow(sum,1.0/q_bar));
}

lambda = z*lambda_max;
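A minimal standalone sketch of the quantity the rewritten loop computes, i.e. the largest q̄-norm taken across tasks of each feature row of AᵀY, with ATy stored task-major as in slep_mt_lsr (plain C++, illustrative names, no Shogun dependencies):

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Illustrative helper (not Shogun API): lambda_max = max_i || (A^T y)_{i,.} ||_{q_bar}
// for ATy stored task-major as ATy[t*n_feats + i], mirroring the loop above.
double lambda_max_mixed_norm(const std::vector<double>& ATy,
                             int n_feats, int n_tasks, double q_bar)
{
	double lambda_max = 0.0;
	for (int i = 0; i < n_feats; i++)
	{
		double sum = 0.0;
		for (int t = 0; t < n_tasks; t++)
			sum += std::pow(std::fabs(ATy[t * n_feats + i]), q_bar);
		lambda_max = std::max(lambda_max, std::pow(sum, 1.0 / q_bar));
	}
	return lambda_max;
}

int main()
{
	// 2 features, 2 tasks, q = 2 so q_bar = q/(q-1) = 2.
	// Feature rows across tasks are (3,4) and (4,3); both have 2-norm 5.
	std::vector<double> ATy = {3.0, 4.0,   // task 0: features 0,1
	                           4.0, 3.0};  // task 1: features 0,1
	std::printf("lambda_max = %f\n", lambda_max_mixed_norm(ATy, 2, 2, 2.0));
	return 0;
}

The replaced lines instead took the q̄-norm over the n_feats entries of each task's block of ATy and maximized over tasks, i.e. a norm across features within a task rather than across tasks within a feature.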
@@ -110,7 +113,7 @@
double alphap = 0.0;
double alpha = 1.0;

-while (!done && iter < options.max_iter)
+while (!done && iter <= options.max_iter)
{
beta = (alphap-1.0)/alpha;

3 changes: 2 additions & 1 deletion src/shogun/transfer/multitask/MultitaskLSRegression.cpp
@@ -89,6 +89,7 @@ bool CMultitaskLSRegression::train_machine(CFeatures* data)
options.regularization = m_regularization;
options.termination = m_termination;
options.tolerance = m_tolerance;
+options.max_iter = m_max_iter;

ETaskRelationType relation_type = m_task_relation->get_relation_type();
switch (relation_type)
@@ -98,7 +99,7 @@ bool CMultitaskLSRegression::train_machine(CFeatures* data)
CTaskGroup* task_group = (CTaskGroup*)m_task_relation;
SGVector<index_t> ind = task_group->get_SLEP_ind();
options.ind = ind.vector;
-options.n_nodes = ind.vlen-1;
+options.n_tasks = ind.vlen-1;

m_tasks_w = slep_mt_lsr(features, y.vector, m_z, options);
}
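A self-contained sketch of the option wiring this file now performs, using a hypothetical stand-in struct that carries only the fields visible in the diff (the real slep_options has more members), and assuming SLEP's task-group convention that ind holds n_tasks+1 cumulative task boundaries, which is why n_tasks = ind.vlen-1:

#include <cstdio>
#include <vector>

// Hypothetical stand-in for the SLEP options struct; field names follow the diff above.
struct slep_options_sketch
{
	double regularization = 0.0;
	double termination = 0.0;
	double tolerance = 1e-3;
	int max_iter = 1000;  // forwarded from the machine's m_max_iter as of this commit
	int* ind = nullptr;   // task boundaries: task t covers vectors [ind[t], ind[t+1])
	int n_tasks = 0;      // renamed from n_nodes to match slep_mt_lsr
};

int main()
{
	std::vector<int> ind = {0, 50, 100};  // two tasks of 50 vectors each

	slep_options_sketch options;
	options.max_iter = 1000;
	options.ind = ind.data();
	options.n_tasks = static_cast<int>(ind.size()) - 1;

	std::printf("n_tasks = %d, max_iter = %d\n", options.n_tasks, options.max_iter);
	return 0;
}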
