Skip to content

Commit

Permalink
Fixes for GP regression memory issues
Browse files Browse the repository at this point in the history
  • Loading branch information
lisitsyn committed Sep 2, 2012
1 parent 7e780c2 commit 684cd3a
Show file tree
Hide file tree
Showing 7 changed files with 20 additions and 17 deletions.
Expand Up @@ -219,7 +219,6 @@ int main(int argc, char **argv)
SG_UNREF(grad_search);
SG_UNREF(best_combination);
SG_UNREF(result);
SG_UNREF(mean);

exit_shogun();

Expand Down
Expand Up @@ -115,9 +115,6 @@ int main(int argc, char **argv)
CDenseFeatures<float64_t>* features2=new CDenseFeatures<float64_t> ();
features2->set_feature_matrix(matrix2);

SG_REF(features);
SG_REF(features2);

SG_REF(labels);

/*Allocate our Kernel*/
Expand All @@ -127,14 +124,17 @@ int main(int argc, char **argv)

/*Allocate our mean function*/
CZeroMean* mean = new CZeroMean();

/*Allocate our likelihood function*/
CGaussianLikelihood* lik = new CGaussianLikelihood();

//SG_SPRINT("features2 bef inf rc= %d\n",features2->ref_count());

/*Allocate our inference method*/
CFITCInferenceMethod* inf =
new CFITCInferenceMethod(test_kernel,
features, mean, labels, lik, features2);
//SG_SPRINT("features2 aft inf rc= %d\n",features2->ref_count());

SG_REF(inf);

Expand Down Expand Up @@ -191,11 +191,10 @@ int main(int argc, char **argv)
SGVector<float64_t> diagonal = inf->get_diagonal_vector();
SGMatrix<float64_t> cholesky = inf->get_cholesky();
gp->set_return_type(CGaussianProcessRegression::GP_RETURN_COV);

CRegressionLabels* covariance = gp->apply_regression(features);

gp->set_return_type(CGaussianProcessRegression::GP_RETURN_MEANS);

CRegressionLabels* predictions = gp->apply_regression();

alpha.display_vector("Alpha Vector");
Expand Down
Expand Up @@ -218,7 +218,6 @@ int main(int argc, char **argv)
SG_UNREF(grad_search);
SG_UNREF(best_combination);
SG_UNREF(result);
SG_UNREF(mean);

exit_shogun();

Expand Down
4 changes: 2 additions & 2 deletions src/shogun/lib/Map.h
Expand Up @@ -439,8 +439,8 @@ IGNORE_IN_CLASSLIST template<class K, class T> class CMap: public CSGObject
CMapNode<K, T>* element = array->get_element(i);
if (element!=NULL)
{
//element->key.~K();
//element->data.~T();
element->key.~K();
element->data.~T();

if (use_sg_mallocs)
SG_FREE(element);
Expand Down
1 change: 1 addition & 0 deletions src/shogun/regression/GaussianProcessRegression.cpp
Expand Up @@ -237,6 +237,7 @@ SGVector<float64_t> CGaussianProcessRegression::get_mean_vector()
result_vector = lik->evaluate_means(result_vector);

SG_UNREF(lik);
SG_UNREF(mean_function);

return result_vector;
}
Expand Down
14 changes: 8 additions & 6 deletions src/shogun/regression/gp/FITCInferenceMethod.cpp
Expand Up @@ -319,6 +319,8 @@ get_marginal_likelihood_derivatives(CMap<TParameter*,
m_kernel->init(m_latent_features, m_latent_features);
derivuu = m_kernel->get_parameter_gradient(param, obj);

m_kernel->remove_lhs_and_rhs();

mean_derivatives = m_mean->get_parameter_derivative(
param, obj, m_feature_matrix, g);

Expand All @@ -342,6 +344,8 @@ get_marginal_likelihood_derivatives(CMap<TParameter*,

m_kernel->init(m_latent_features, m_latent_features);
derivuu = m_kernel->get_parameter_gradient(param, obj);

m_kernel->remove_lhs_and_rhs();
}

sum[0] = 0;
Expand Down Expand Up @@ -440,6 +444,8 @@ get_marginal_likelihood_derivatives(CMap<TParameter*,
m_kernel->init(m_latent_features, m_latent_features);
derivuu = m_kernel->get_kernel_matrix();

m_kernel->remove_lhs_and_rhs();

MatrixXd ddiagKi(deriv.num_cols, deriv.num_rows);
MatrixXd dKuui(derivuu.num_cols, derivuu.num_rows);
MatrixXd dKui(derivtru.num_cols, derivtru.num_rows);
Expand Down Expand Up @@ -620,17 +626,13 @@ SGMatrix<float64_t> CFITCInferenceMethod::get_cholesky()

void CFITCInferenceMethod::update_train_kernel()
{
m_kernel->cleanup();

m_kernel->init(m_features, m_features);

//K(X, X)
SGMatrix<float64_t> kernel_matrix = m_kernel->get_kernel_matrix();

m_ktrtr=kernel_matrix.clone();

m_kernel->cleanup();

m_kernel->init(m_latent_features, m_latent_features);

//K(X, X)
Expand All @@ -643,12 +645,12 @@ void CFITCInferenceMethod::update_train_kernel()
m_kuu(i,j) = kernel_matrix(i,j)*m_scale*m_scale;
}

m_kernel->cleanup();

m_kernel->init(m_latent_features, m_features);

kernel_matrix = m_kernel->get_kernel_matrix();

m_kernel->remove_lhs_and_rhs();

m_ktru = SGMatrix<float64_t>(kernel_matrix.num_rows, kernel_matrix.num_cols);

for (index_t i = 0; i < kernel_matrix.num_rows; i++)
Expand Down
5 changes: 4 additions & 1 deletion src/shogun/regression/gp/InferenceMethod.cpp
Expand Up @@ -19,6 +19,8 @@ using namespace shogun;

CInferenceMethod::CInferenceMethod()
{
init();

m_kernel = NULL;
m_model = NULL;
m_labels = NULL;
Expand All @@ -39,7 +41,8 @@ CInferenceMethod::CInferenceMethod(CKernel* kern, CFeatures* feat,
set_mean(m);
}

CInferenceMethod::~CInferenceMethod() {
CInferenceMethod::~CInferenceMethod()
{
SG_UNREF(m_kernel);
SG_UNREF(m_features);
SG_UNREF(m_latent_features);
Expand Down

0 comments on commit 684cd3a

Please sign in to comment.