Skip to content

Commit

Permalink
Fixed binary-label output of the DomainAdaptationSVM classes (apply_binary)
Browse files Browse the repository at this point in the history
  • Loading branch information
lisitsyn committed May 22, 2012
1 parent d204fcd commit fca820b
Show file tree
Hide file tree
Showing 3 changed files with 34 additions and 31 deletions.
Expand Up @@ -84,7 +84,7 @@ def classifier_domainadaptationsvm_modular(fm_train_dna=traindna,fm_test_dna=tes
dasvm = DomainAdaptationSVM(C, kernel2, labels2, svm, 1.0)
dasvm.train()

out = dasvm.apply(feats_test2).get_labels()
out = dasvm.apply_binary(feats_test2)

return out #,dasvm TODO

Expand Down
29 changes: 15 additions & 14 deletions src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp
Expand Up @@ -152,25 +152,26 @@ void CDomainAdaptationSVM::set_train_factor(float64_t factor)

/** Classify the given features with the adapted SVM.
 *
 * The domain-adaptation output is the current SVM's confidence plus the
 * pre-trained SVM's confidence weighted by B. The pre-SVM call is recursive
 * when presvm is itself a CDomainAdaptationSVM.
 *
 * @param data features to classify (must be non-NULL)
 * @return newly allocated CBinaryLabels with the combined confidences;
 *         caller takes ownership
 */
CBinaryLabels* CDomainAdaptationSVM::apply_binary(CFeatures* data)
{
	ASSERT(data);
	// the combination rule is only valid for a bias-free pre-trained SVM
	ASSERT(presvm->get_bias()==0.0);

	int32_t num_examples = data->get_num_vectors();

	// output of the SVM trained on the target domain
	CBinaryLabels* out_current = CSVMLight::apply_binary(data);

	// recursive call if used on DomainAdaptationSVM object
	CBinaryLabels* out_presvm = presvm->apply_binary(data);

	// combine outputs: current confidence + B * pre-SVM confidence
	SGVector<float64_t> out_combined(num_examples);
	for (int32_t i=0; i<num_examples; i++)
	{
		out_combined[i] = out_current->get_confidence(i) + B*out_presvm->get_confidence(i);
	}
	// intermediate label objects are no longer needed
	SG_UNREF(out_current);
	SG_UNREF(out_presvm);

	// a fresh labels object is returned instead of mutating out_current
	return new CBinaryLabels(out_combined);
}

Expand Down
34 changes: 18 additions & 16 deletions src/shogun/transfer/domain_adaptation/DomainAdaptationSVMLinear.cpp
Expand Up @@ -137,7 +137,7 @@ bool CDomainAdaptationSVMLinear::train_machine(CFeatures* train_data)
// pre-compute linear term
for (int32_t i=0; i!=num_training_points; i++)
{
lin_term[i] = train_factor * B * labels->get_label(i) * parent_svm_out->get_label(i) - 1.0;
lin_term[i] = train_factor * B * labels->get_confidence(i) * parent_svm_out->get_confidence(i) - 1.0;
}

// set linear term for QP
Expand Down Expand Up @@ -212,27 +212,29 @@ void CDomainAdaptationSVMLinear::set_train_factor(float64_t factor)

/** Classify the given features with the adapted linear SVM.
 *
 * If a pre-trained SVM is set, its (possibly recursive) confidences are
 * added to the current LibLinear confidences weighted by B; otherwise the
 * plain LibLinear output is returned.
 *
 * @param data features to classify (must be non-NULL)
 * @return newly allocated CBinaryLabels with the combined confidences;
 *         caller takes ownership
 */
CBinaryLabels* CDomainAdaptationSVMLinear::apply_binary(CFeatures* data)
{
	ASSERT(data);

	int32_t num_examples = data->get_num_vectors();

	// output of the SVM trained on the target domain
	CBinaryLabels* out_current = CLibLinear::apply_binary(data);

	SGVector<float64_t> out_combined(num_examples);
	if (presvm)
	{
		// bias check moved inside the null guard: previously presvm was
		// dereferenced by the ASSERT before being tested for NULL
		ASSERT(presvm->get_bias()==0.0);

		// recursive call if used on DomainAdaptationSVM object
		CBinaryLabels* out_presvm = presvm->apply_binary(data);

		// combine outputs: current confidence + B * pre-SVM confidence
		for (int32_t i=0; i<num_examples; i++)
			out_combined[i] = out_current->get_confidence(i) + B*out_presvm->get_confidence(i);

		SG_UNREF(out_presvm);
	}
	else
	{
		// no pre-trained SVM: copy the plain LibLinear confidences
		// (bug fix: out_combined was left uninitialized in this branch)
		for (int32_t i=0; i<num_examples; i++)
			out_combined[i] = out_current->get_confidence(i);
	}

	SG_UNREF(out_current);

	return new CBinaryLabels(out_combined);
}

#endif //HAVE_LAPACK
Expand Down

0 comments on commit fca820b

Please sign in to comment.