
Commit

add linear least squares and ridge regression
This adds linear ridge regression and a convenience class
CLeastSquaresRegression that calls CLinearRidgeRegression with
regularization parameter tau=0. To avoid confusion, KRR is
renamed to KernelRidgeRegression throughout the examples and code.
Soeren Sonnenburg committed Feb 14, 2012
1 parent 3649f3b commit 3e3db13
Showing 27 changed files with 446 additions and 136 deletions.
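
For context on the tau=0 relationship in the commit message: linear ridge regression on a d x n feature matrix X (one training example per column, the layout Shogun's RealFeatures use) fits weights by solving (X X^T + tau*I) w = X y, and tau=0 reduces this to ordinary least squares. Below is a minimal NumPy sketch of that textbook closed form, illustrative only and not Shogun's implementation; every name in it is made up for the example.

import numpy as np

def linear_ridge_regression(X, y, tau):
    # X is (d, n) with one training example per column; y holds n labels.
    # Solve (X X^T + tau*I) w = X y instead of forming an explicit inverse.
    d = X.shape[0]
    return np.linalg.solve(X @ X.T + tau * np.eye(d), X @ y)

def predict(w, X_test):
    # One prediction per test example (per column of X_test).
    return X_test.T @ w

# Tiny illustrative run on random data.
rng = np.random.default_rng(0)
X = rng.standard_normal((3, 50))
y = X.T @ np.array([1.0, -2.0, 0.5]) + 0.01 * rng.standard_normal(50)
w_ridge = linear_ridge_regression(X, y, tau=1e-6)
w_ols = linear_ridge_regression(X, y, tau=0.0)  # the least-squares special case
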
2 changes: 1 addition & 1 deletion examples/undocumented/cmdline_static/regression_krr.sg
@@ -1,5 +1,5 @@
% kernel ridge regression
print KRR
print Kernel Ridge Regression
set_kernel GAUSSIAN REAL 10 2.1
set_features TRAIN ../data/fm_train_real.dat
set_labels TRAIN ../data/label_train_twoclass.dat
@@ -17,7 +17,7 @@ public class regression_krr_modular {

Labels labels = new Labels(trainlab);

KRR krr = new KRR(tau, kernel, labels);
KernelRidgeRegression krr = new KernelRidgeRegression(tau, kernel, labels);
krr.train(feats_train);

kernel.init(feats_train, feats_test);
@@ -22,7 +22,7 @@ public static void main(String argv[]) {

Labels labels = new Labels(trainlab);

KRR krr = new KRR(tau, kernel, labels);
KernelRidgeRegression krr = new KernelRidgeRegression(tau, kernel, labels);
krr.train(feats_train);

kernel.init(feats_train, feats_test);
@@ -12,7 +12,7 @@
#include <shogun/features/SimpleFeatures.h>
#include <shogun/features/Labels.h>
#include <shogun/kernel/LinearKernel.h>
#include <shogun/regression/KRR.h>
#include <shogun/regression/KernelRidgeRegression.h>
#include <shogun/evaluation/CrossValidation.h>
#include <shogun/evaluation/CrossValidationSplitting.h>
#include <shogun/evaluation/MeanSquaredError.h>
@@ -57,7 +57,7 @@ void test_cross_validation()

/* kernel ridge regression*/
float64_t tau=0.0001;
CKRR* krr=new CKRR(tau, kernel, labels);
CKernelRidgeRegression* krr=new CKernelRidgeRegression(tau, kernel, labels);

/* evaluation criterion */
CMeanSquaredError* eval_crit=
@@ -13,7 +13,7 @@
#include <shogun/features/Labels.h>
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/kernel/PolyKernel.h>
#include <shogun/regression/KRR.h>
#include <shogun/regression/KernelRidgeRegression.h>
#include <shogun/evaluation/CrossValidation.h>
#include <shogun/evaluation/CrossValidationSplitting.h>
#include <shogun/evaluation/MeanSquaredError.h>
@@ -97,7 +97,7 @@ void test_cross_validation()
CLabels* labels=new CLabels(lab);

/* kernel ridge regression, only set labels for now, rest does not matter */
CKRR* krr=new CKRR(0, NULL, labels);
CKernelRidgeRegression* krr=new CKernelRidgeRegression(0, NULL, labels);

/* evaluation criterion */
CMeanSquaredError* eval_crit=
4 changes: 2 additions & 2 deletions examples/undocumented/lua_modular/regression_krr_modular.lua
@@ -16,13 +16,13 @@ function regression_krr_modular (fm_train,fm_test,label_train,width,tau)

labels=modshogun.Labels(label_train)

krr=modshogun.KRR(tau, kernel, labels)
krr=modshogun.KernelRidgeRegression(tau, kernel, labels)
krr:train(feats_train)

kernel:init(feats_train, feats_test)
out = krr:apply():get_labels()
return out,kernel,krr
end

print 'KRR'
print 'KernelRidgeRegression'
regression_krr_modular(unpack(parameter_list[1]))
4 changes: 2 additions & 2 deletions examples/undocumented/matlab_and_octave/regression_krr.m
@@ -8,8 +8,8 @@
fm_train=load_matrix('../data/fm_train_real.dat');
fm_test=load_matrix('../data/fm_test_real.dat');

% KRR
disp('KRR');
% KernelRidgeRegression
disp('KernelRidgeRegression');

tau=1.2;

@@ -25,15 +25,15 @@ def evaluation_cross_validation_regression(fm_train=traindat,fm_test=testdat,lab
from shogun.Evaluation import CrossValidationSplitting
from shogun.Features import Labels, RealFeatures
from shogun.Kernel import GaussianKernel
from shogun.Regression import KRR
from shogun.Regression import KernelRidgeRegression

# training data
features=RealFeatures(fm_train)
labels=Labels(label_train)

# kernel and predictor
kernel=GaussianKernel()
predictor=KRR(tau, kernel, labels)
predictor=KernelRidgeRegression(tau, kernel, labels)

# splitting strategy for 5-fold cross-validation (for classification it's better
# to use "StratifiedCrossValidation", but here, the std x-val is used
@@ -6,7 +6,7 @@
from shogun.Kernel import *
import util

util.set_title('KRR')
util.set_title('KernelRidgeRegression')

width=20

@@ -22,7 +22,7 @@
labels = util.get_labels()
train = util.get_realfeatures(pos, neg)
gk=GaussianKernel(train, train, width)
krr = KRR()
krr = KernelRidgeRegression()
krr.set_labels(labels)
krr.set_kernel(gk)
krr.set_tau(1e-3)
@@ -4,7 +4,7 @@
from shogun.Kernel import *
import util

util.set_title('KRR on Sine')
util.set_title('KernelRidgeRegression on Sine')


X, Y=util.get_sinedata()
@@ -13,7 +13,7 @@
feat=RealFeatures(X)
lab=Labels(Y.flatten())
gk=GaussianKernel(feat, feat, width)
krr=KRR()
krr=KernelRidgeRegression()
krr.set_labels(lab)
krr.set_kernel(gk)
krr.set_tau(1e-6)
@@ -28,7 +28,7 @@ def evaluation_cross_validation_classification(fm_train=traindat,fm_test=testdat
from shogun.Evaluation import CrossValidationSplitting
from shogun.Features import Labels
from shogun.Features import RealFeatures
from shogun.Regression import KRR
from shogun.Regression import KernelRidgeRegression
from shogun.ModelSelection import GridSearchModelSelection
from shogun.ModelSelection import ModelSelectionParameters

@@ -41,7 +41,7 @@ def evaluation_cross_validation_classification(fm_train=traindat,fm_test=testdat
labels=Labels(label_train)

# predictor, set tau=0 here, doesn't matter
predictor=KRR()
predictor=KernelRidgeRegression()

# splitting strategy for 5-fold cross-validation (for classification it's better
# to use "StratifiedCrossValidation", but the standard
@@ -13,11 +13,11 @@

parameter_list = [[traindat,testdat,label_traindat,0.8,1e-6],[traindat,testdat,label_traindat,0.9,1e-7]]

def regression_krr_modular (fm_train=traindat,fm_test=testdat,label_train=label_traindat,width=0.8,tau=1e-6):
def regression_kernel_ridge_modular (fm_train=traindat,fm_test=testdat,label_train=label_traindat,width=0.8,tau=1e-6):

from shogun.Features import Labels, RealFeatures
from shogun.Kernel import GaussianKernel
from shogun.Regression import KRR
from shogun.Regression import KernelRidgeRegression

feats_train=RealFeatures(fm_train)
feats_test=RealFeatures(fm_test)
@@ -26,7 +26,7 @@ def regression_krr_modular (fm_train=traindat,fm_test=testdat,label_train=label_

labels=Labels(label_train)

krr=KRR(tau, kernel, labels)
krr=KernelRidgeRegression(tau, kernel, labels)
krr.train(feats_train)

kernel.init(feats_train, feats_test)
@@ -38,15 +38,15 @@ def krr_short ():
print 'KRR_short'
from shogun.Features import Labels, RealFeatures
from shogun.Kernel import GaussianKernel
from shogun.Regression import KRR
from shogun.Regression import KernelRidgeRegression

width=0.8; tau=1e-6
krr=KRR(tau, GaussianKernel(0, width), Labels(label_train))
krr=KernelRidgeRegression(tau, GaussianKernel(0, width), Labels(label_train))
krr.train(RealFeatures(fm_train))
out = krr.apply(RealFeatures(fm_test)).get_labels()

return krr,out

if __name__=='__main__':
print 'KRR'
regression_krr_modular(*parameter_list[0])
regression_kernel_ridge_modular(*parameter_list[0])
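
The kernel ridge regression examples above hand a Gaussian kernel and a regularization constant tau to KernelRidgeRegression. The standard formulation behind this is to solve (K + tau*I) alpha = y on the training kernel matrix and to predict with the train/test kernel; the sketch below shows that formulation in NumPy. It is not Shogun's code, and it assumes the exp(-||a-b||^2/width) parameterization of the Gaussian kernel.

import numpy as np

def gaussian_kernel(A, B, width):
    # A, B are (d, n) feature matrices, one example per column, matching
    # the feature layout of the examples above.
    sq = (A * A).sum(0)[:, None] + (B * B).sum(0)[None, :] - 2.0 * (A.T @ B)
    return np.exp(-sq / width)

def kernel_ridge_fit(K_train, y, tau):
    # alpha = (K + tau*I)^{-1} y on the training kernel matrix.
    return np.linalg.solve(K_train + tau * np.eye(K_train.shape[0]), y)

def kernel_ridge_predict(K_cross, alpha):
    # K_cross[i, j] = k(train_i, test_j), so predictions are K_cross^T alpha.
    return K_cross.T @ alpha

# Usage mirroring the example above (hypothetical arrays):
#   K = gaussian_kernel(feats_train, feats_train, width=0.8)
#   alpha = kernel_ridge_fit(K, labels, tau=1e-6)
#   out = kernel_ridge_predict(gaussian_kernel(feats_train, feats_test, 0.8), alpha)
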
@@ -0,0 +1,29 @@
###########################################################################
# kernel ridge regression
###########################################################################
from numpy import array
from numpy.random import seed, rand
from tools.load import LoadMatrix
lm=LoadMatrix()

traindat = lm.load_numbers('../data/fm_train_real.dat')
testdat = lm.load_numbers('../data/fm_test_real.dat')
label_traindat = lm.load_labels('../data/label_train_twoclass.dat')


parameter_list = [[traindat,testdat,label_traindat]]

def regression_least_squares_modular (fm_train=traindat,fm_test=testdat,label_train=label_traindat,tau=1e-6):

from shogun.Features import Labels, RealFeatures
from shogun.Kernel import GaussianKernel
from shogun.Regression import LeastSquaresRegression

ls=LeastSquaresRegression(RealFeatures(fm_train), Labels(label_train))
ls.train()
out = ls.apply(RealFeatures(fm_test)).get_labels()
return out,ls

if __name__=='__main__':
print 'LeastSquaresRegression'
regression_least_squares_modular(*parameter_list[0])
@@ -0,0 +1,29 @@
###########################################################################
# linear ridge regression
###########################################################################
from numpy import array
from numpy.random import seed, rand
from tools.load import LoadMatrix
lm=LoadMatrix()

traindat = lm.load_numbers('../data/fm_train_real.dat')
testdat = lm.load_numbers('../data/fm_test_real.dat')
label_traindat = lm.load_labels('../data/label_train_twoclass.dat')


parameter_list = [[traindat,testdat,label_traindat,1e-6],[traindat,testdat,label_traindat,100]]

def regression_linear_ridge_modular (fm_train=traindat,fm_test=testdat,label_train=label_traindat,tau=1e-6):

from shogun.Features import Labels, RealFeatures
from shogun.Kernel import GaussianKernel
from shogun.Regression import LinearRidgeRegression

rr=LinearRidgeRegression(tau, RealFeatures(fm_train), Labels(label_train))
rr.train()
out = rr.apply(RealFeatures(fm_test)).get_labels()
return out,rr

if __name__=='__main__':
print 'LinearRidgeRegression'
regression_linear_ridge_modular(*parameter_list[0])
8 changes: 4 additions & 4 deletions examples/undocumented/ruby_modular/regression_krr_modular.rb
@@ -24,7 +24,7 @@ def regression_krr_modular(fm_train=traindat,fm_test=testdat,label_train=label_t

labels=Modshogun::Labels.new(label_train)

krr=Modshogun::KRR.new(tau, kernel, labels)
krr=Modshogun::KernelRidgeRegression.new(tau, kernel, labels)
krr.train(feats_train)

kernel.init(feats_train, feats_test)
@@ -39,8 +39,8 @@ def krr_short()
puts 'KRR_short'

width=0.8; tau=1e-6
# *** krr=KRR(tau, GaussianKernel(0, width), Labels(label_train))
krr=Modshogun::KRR.new(tau, GaussianKernel(0, width), Labels(label_train))
# *** krr=KernelRidgeRegression(tau, GaussianKernel(0, width), Labels(label_train))
krr=Modshogun::KernelRidgeRegression.new(tau, GaussianKernel(0, width), Labels(label_train))
#krr.set_features(tau, GaussianKernel(0, width), Labels(label_train))
krr.train(RealFeatures(fm_train))
out = krr.apply(RealFeatures(fm_test)).get_labels()
@@ -50,6 +50,6 @@ def krr_short()
end

if __FILE__ == $0
puts 'KRR'
puts 'KernelRidgeRegression'
pp regression_krr_modular(*parameter_list[0])
end
8 changes: 6 additions & 2 deletions src/interfaces/modular/Regression.i
@@ -10,7 +10,9 @@

/* Remove C Prefix */
%rename(Regression) CRegression;
%rename(KRR) CKRR;
%rename(KernelRidgeRegression) CKernelRidgeRegression;
%rename(LinearRidgeRegression) CLinearRidgeRegression;
%rename(LeastSquaresRegression) CLeastSquaresRegression;
%rename(LibSVR) CLibSVR;
%rename(MKL) CMKL;
%rename(MKLRegression) CMKLRegression;
@@ -20,7 +22,9 @@

/* Include Class Headers to make them visible from within the target language */
%include <shogun/regression/Regression.h>
%include <shogun/regression/KRR.h>
%include <shogun/regression/KernelRidgeRegression.h>
%include <shogun/regression/LinearRidgeRegression.h>
%include <shogun/regression/LeastSquaresRegression.h>
%include <shogun/regression/svr/LibSVR.h>
%include <shogun/classifier/mkl/MKL.h>
%include <shogun/regression/svr/MKLRegression.h>
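
The %rename directives above strip the C prefix, which is how the new classes become visible under their plain names in the modular target languages; the Python examples added in this commit then import them directly, for instance:

# imports as used by the new python_modular examples in this commit
from shogun.Regression import KernelRidgeRegression
from shogun.Regression import LinearRidgeRegression
from shogun.Regression import LeastSquaresRegression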
4 changes: 3 additions & 1 deletion src/interfaces/modular/Regression_includes.i
@@ -2,7 +2,9 @@
#include <shogun/regression/Regression.h>
#include <shogun/machine/Machine.h>
#include <shogun/machine/KernelMachine.h>
#include <shogun/regression/KRR.h>
#include <shogun/regression/KernelRidgeRegression.h>
#include <shogun/regression/LinearRidgeRegression.h>
#include <shogun/regression/LeastSquaresRegression.h>
#include <shogun/classifier/svm/SVM.h>
#include <shogun/classifier/svm/LibSVM.h>
#include <shogun/regression/svr/LibSVR.h>
6 changes: 4 additions & 2 deletions src/shogun/machine/Machine.h
@@ -43,7 +43,7 @@ enum EClassifierType
CT_LPBOOST = 120,
CT_KNN = 130,
CT_SVMLIN=140,
CT_KRR = 150,
CT_KERNELRIDGEREGRESSION = 150,
CT_GNPPSVM = 160,
CT_GMNPSVM = 170,
CT_SUBGRADIENTSVM = 180,
@@ -68,7 +68,9 @@ enum EClassifierType
CT_GAUSSIANNAIVEBAYES = 370,
CT_AVERAGEDPERCEPTRON = 380,
CT_SGDQN = 390,
CT_CONJUGATEINDEX = 400
CT_CONJUGATEINDEX = 400,
CT_LINEARRIDGEREGRESSION = 410,
CT_LEASTSQUARESREGRESSION = 420
};

/** solver type */