Commit

many fixes in examples
gsomix committed Aug 29, 2012
1 parent feafbf7 commit 037f1ed
Showing 114 changed files with 346 additions and 325 deletions.
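
Most of the changes below touch the python_modular examples, which all follow the same convention: a module-level parameter_list, an entry-point function (typically named after the file) whose defaults come from that list, and an `if __name__=='__main__'` guard that runs the first parameter set. A minimal sketch of that pattern (illustrative only; the file and function names here are hypothetical, not part of this commit):

parameter_list = [[1.0, 1e-3], [0.5, 1e-2]]   # each inner list is one parameter set

def classifier_example_modular (C=1.0, epsilon=1e-3):
    # stand-in body; the real examples build features, a kernel and a classifier here
    print('running with C=%g, epsilon=%g' % (C, epsilon))
    return C, epsilon

if __name__=='__main__':
    print('Example')
    classifier_example_modular(*parameter_list[0])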
@@ -1,6 +1,6 @@
parameter_list = [[1,7],[2,8]]

-def classifier_custom_kernel_modular(C=1,dim=7):
+def classifier_custom_kernel_modular (C=1,dim=7):
    from shogun.Features import RealFeatures, BinaryLabels
    from shogun.Kernel import CustomKernel
    from shogun.Classifier import LibSVM
@@ -55,7 +55,7 @@
testdna2,label_testdna2,1,3],[traindna,testdna,label_traindna,label_testdna,traindna2,label_traindna2, \
testdna2,label_testdna2,2,5]]

-def classifier_domainadaptationsvm_modular(fm_train_dna=traindna,fm_test_dna=testdna, \
+def classifier_domainadaptationsvm_modular (fm_train_dna=traindna,fm_test_dna=testdna, \
        label_train_dna=label_traindna, \
        label_test_dna=label_testdna,fm_train_dna2=traindna2,fm_test_dna2=testdna2, \
        label_train_dna2=label_traindna2,label_test_dna2=label_testdna2,C=1,degree=3):
@@ -9,7 +9,7 @@

parameter_list = [[traindat,testdat,label_traindat]]

-def classifier_featureblock_logistic_regression(fm_train=traindat,fm_test=testdat,label_train=label_traindat):
+def classifier_featureblock_logistic_regression (fm_train=traindat,fm_test=testdat,label_train=label_traindat):

    from modshogun import BinaryLabels, RealFeatures, IndexBlock, IndexBlockGroup, FeatureBlockLogisticRegression

@@ -6,7 +6,7 @@

parameter_list = [[traindat,testdat,label_traindat]]

-def classifier_gaussiannaivebayes_modular(fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat):
+def classifier_gaussiannaivebayes_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat):
    from shogun.Features import RealFeatures, MulticlassLabels
    from shogun.Classifier import GaussianNaiveBayes

@@ -6,7 +6,7 @@

parameter_list = [[traindat,testdat,label_traindat,3],[traindat,testdat,label_traindat,3]]

-def classifier_knn_modular(fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat, k=3 ):
+def classifier_knn_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat, k=3 ):
    from shogun.Features import RealFeatures, MulticlassLabels
    from shogun.Classifier import KNN
    from shogun.Distance import EuclideanDistance
@@ -7,7 +7,7 @@

parameter_list = [[traindat,testdat,label_traindat,0.9,1e-3],[traindat,testdat,label_traindat,0.8,1e-2]]

-def classifier_liblinear_modular(fm_train_real, fm_test_real,
+def classifier_liblinear_modular (fm_train_real, fm_test_real,
        label_train_twoclass, C, epsilon):

    from shogun.Features import RealFeatures, SparseRealFeatures, BinaryLabels
@@ -1,29 +1,32 @@
-from numpy import *
-from numpy.random import randn
-from shogun.Features import *
-from shogun.Classifier import *
-from shogun.Kernel import *
-
-num=1000
-dist=1
-width=2.1
-C=1
-
-traindata_real=concatenate((randn(2,num)-dist, randn(2,num)+dist), axis=1)
-testdata_real=concatenate((randn(2,num)-dist, randn(2,num)+dist), axis=1);
-
-trainlab=concatenate((-ones(num), ones(num)));
-testlab=concatenate((-ones(num), ones(num)));
-
-feats_train=RealFeatures(traindata_real);
-feats_test=RealFeatures(testdata_real);
-kernel=GaussianKernel(feats_train, feats_train, width);
-
-labels=BinaryLabels(trainlab);
-svm=LibSVM(C, kernel, labels);
-svm.train();
-
-kernel.init(feats_train, feats_test);
-out=svm.apply().get_labels();
-testerr=mean(sign(out)!=testlab)
-print(testerr)
+from numpy import mean, sign
+
+from tools.load import LoadMatrix
+lm=LoadMatrix()
+
+traindat = lm.load_numbers('../data/fm_train_real.dat')
+testdat = lm.load_numbers('../data/fm_test_real.dat')
+label_traindat = lm.load_labels('../data/label_train_twoclass.dat')
+
+parameter_list = [[traindat,testdat,label_traindat,2.1,1]]
+
+def classifier_libsvm_minimal_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_twoclass=label_traindat,width=2.1,C=1):
+    from shogun.Features import RealFeatures, BinaryLabels
+    from shogun.Classifier import LibSVM
+    from shogun.Kernel import GaussianKernel
+
+    feats_train=RealFeatures(fm_train_real);
+    feats_test=RealFeatures(fm_test_real);
+    kernel=GaussianKernel(feats_train, feats_train, width);
+
+    labels=BinaryLabels(label_train_twoclass);
+    svm=LibSVM(C, kernel, labels);
+    svm.train();
+
+    kernel.init(feats_train, feats_test);
+    out=svm.apply().get_labels();
+    testerr=mean(sign(out)!=label_train_twoclass)
+    print(testerr)
+
+if __name__=='__main__':
+    print('LibSVM Minimal')
+    classifier_libsvm_minimal_modular(*parameter_list[0])
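
In the rewritten example above, the error is computed as mean(sign(out) != labels): the sign of each SVM output is the predicted class in {-1, +1}, and the mean of the mismatch mask is the misclassification rate. A tiny standalone numpy illustration of just that step (not part of the commit):

from numpy import array, mean, sign

out = array([1.3, -0.2, 0.7, -2.1])   # raw SVM outputs
lab = array([1.0, 1.0, 1.0, -1.0])    # ground-truth labels in {-1, +1}

testerr = mean(sign(out) != lab)      # fraction of misclassified examples
print(testerr)                        # 0.25 -- one of four predictions is wrong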
147 changes: 77 additions & 70 deletions examples/undocumented/python_modular/classifier_multiclass_ecoc.py
@@ -1,79 +1,86 @@
-import classifier_multiclass_shared
+import re
+import time
+from tools.multiclass_shared import prepare_data

# run with toy data
-[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data()
+[traindat, label_traindat, testdat, label_testdat] = prepare_data()
# run with opt-digits if available
-#[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data(False)
+#[traindat, label_traindat, testdat, label_testdat] = prepare_data(False)

parameter_list = [[traindat,testdat,label_traindat,label_testdat,2.1,1,1e-5]]

-import shogun.Classifier as Classifier
-from shogun.Classifier import ECOCStrategy
-from shogun.Features import RealFeatures, MulticlassLabels
-from shogun.Classifier import LibLinear, L2R_L2LOSS_SVC, LinearMulticlassMachine
-from shogun.Evaluation import MulticlassAccuracy
+def classifier_multiclass_ecoc (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,label_test_multiclass=label_testdat,lawidth=2.1,C=1,epsilon=1e-5):

-def nonabstract_class(name):
-    try:
-        getattr(Classifier, name)()
-    except TypeError:
-        return False
-    return True
+    import shogun.Classifier as Classifier
+    from shogun.Classifier import ECOCStrategy, LibLinear, L2R_L2LOSS_SVC, LinearMulticlassMachine
+    from shogun.Evaluation import MulticlassAccuracy
+    from shogun.Features import RealFeatures, MulticlassLabels

-import re
-encoders = [x for x in dir(Classifier)
-            if re.match(r'ECOC.+Encoder', x) and nonabstract_class(x)]
-decoders = [x for x in dir(Classifier)
-            if re.match(r'ECOC.+Decoder', x) and nonabstract_class(x)]

-fea_train = RealFeatures(traindat)
-fea_test = RealFeatures(testdat)
-gnd_train = MulticlassLabels(label_traindat)
-if label_testdat is None:
-    gnd_test = None
-else:
-    gnd_test = MulticlassLabels(label_testdat)

-base_classifier = LibLinear(L2R_L2LOSS_SVC)
-base_classifier.set_bias_enabled(True)

-print('Testing with %d encoders and %d decoders' % (len(encoders), len(decoders)))
-print('-' * 70)
-format_str = '%%15s + %%-10s %%-10%s %%-10%s %%-10%s'
-print((format_str % ('s', 's', 's')) % ('encoder', 'decoder', 'codelen', 'time', 'accuracy'))

-def run_ecoc(ier, idr):
-    encoder = getattr(Classifier, encoders[ier])()
-    decoder = getattr(Classifier, decoders[idr])()

-    # whether encoder is data dependent
-    if hasattr(encoder, 'set_labels'):
-        encoder.set_labels(gnd_train)
-        encoder.set_features(fea_train)

-    strategy = ECOCStrategy(encoder, decoder)
-    classifier = LinearMulticlassMachine(strategy, fea_train, base_classifier, gnd_train)
-    classifier.train()
-    label_pred = classifier.apply(fea_test)
-    if gnd_test is not None:
-        evaluator = MulticlassAccuracy()
-        acc = evaluator.evaluate(label_pred, gnd_test)
-    else:
-        acc = None

-    return (classifier.get_num_machines(), acc)
+    def nonabstract_class(name):
+        try:
+            getattr(Classifier, name)()
+        except TypeError:
+            return False
+        return True

+    encoders = [x for x in dir(Classifier)
+                if re.match(r'ECOC.+Encoder', x) and nonabstract_class(x)]
+    decoders = [x for x in dir(Classifier)
+                if re.match(r'ECOC.+Decoder', x) and nonabstract_class(x)]

-import time
-for ier in range(len(encoders)):
-    for idr in range(len(decoders)):
-        t_begin = time.clock()
-        (codelen, acc) = run_ecoc(ier, idr)
-        if acc is None:
-            acc_fmt = 's'
-            acc = 'N/A'
-        else:
-            acc_fmt = '.4f'

-        t_elapse = time.clock() - t_begin
-        print((format_str % ('d', '.3f', acc_fmt)) %
-              (encoders[ier][4:-7], decoders[idr][4:-7], codelen, t_elapse, acc))
+    fea_train = RealFeatures(fm_train_real)
+    fea_test = RealFeatures(fm_test_real)
+    gnd_train = MulticlassLabels(label_train_multiclass)
+    if label_test_multiclass is None:
+        gnd_test = None
+    else:
+        gnd_test = MulticlassLabels(label_test_multiclass)

+    base_classifier = LibLinear(L2R_L2LOSS_SVC)
+    base_classifier.set_bias_enabled(True)

+    print('Testing with %d encoders and %d decoders' % (len(encoders), len(decoders)))
+    print('-' * 70)
+    format_str = '%%15s + %%-10s %%-10%s %%-10%s %%-10%s'
+    print((format_str % ('s', 's', 's')) % ('encoder', 'decoder', 'codelen', 'time', 'accuracy'))

+    def run_ecoc(ier, idr):
+        encoder = getattr(Classifier, encoders[ier])()
+        decoder = getattr(Classifier, decoders[idr])()

+        # whether encoder is data dependent
+        if hasattr(encoder, 'set_labels'):
+            encoder.set_labels(gnd_train)
+            encoder.set_features(fea_train)

+        strategy = ECOCStrategy(encoder, decoder)
+        classifier = LinearMulticlassMachine(strategy, fea_train, base_classifier, gnd_train)
+        classifier.train()
+        label_pred = classifier.apply(fea_test)
+        if gnd_test is not None:
+            evaluator = MulticlassAccuracy()
+            acc = evaluator.evaluate(label_pred, gnd_test)
+        else:
+            acc = None

+        return (classifier.get_num_machines(), acc)


+    for ier in range(len(encoders)):
+        for idr in range(len(decoders)):
+            t_begin = time.clock()
+            (codelen, acc) = run_ecoc(ier, idr)
+            if acc is None:
+                acc_fmt = 's'
+                acc = 'N/A'
+            else:
+                acc_fmt = '.4f'

+            t_elapse = time.clock() - t_begin
+            print((format_str % ('d', '.3f', acc_fmt)) %
+                  (encoders[ier][4:-7], decoders[idr][4:-7], codelen, t_elapse, acc))

+if __name__=='__main__':
+    print('MulticlassECOC')
+    classifier_multiclass_ecoc(*parameter_list[0])
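
For context on what the enumerated encoders and decoders do: an ECOC encoder assigns every class a codeword over the binary machines, and a decoder maps a vector of binary predictions back to the class with the nearest codeword. A minimal numpy sketch of one-vs-rest encoding with Hamming decoding (illustrative only; it does not use the shogun ECOC classes exercised above):

import numpy as np

codebook = 2 * np.eye(4) - 1              # one-vs-rest codebook: row k is the +1/-1 codeword of class k
predictions = np.array([-1, 1, -1, -1])   # signs produced by the four binary machines for one example

distances = (codebook != predictions).sum(axis=1)   # Hamming distance to every codeword
print(np.argmin(distances))                         # -> 1, the class whose codeword disagrees least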

@@ -1,10 +1,10 @@
-import classifier_multiclass_shared
+from tools.multiclass_shared import prepare_data

-[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data(False)
+[traindat, label_traindat, testdat, label_testdat] = prepare_data(False)

parameter_list = [[traindat,testdat,label_traindat,label_testdat,2.1,1,1e-5],[traindat,testdat,label_traindat,label_testdat,2.2,1,1e-5]]

-def classifier_multiclasslinearmachine_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,label_test_multiclass=label_testdat,lawidth=2.1,C=1,epsilon=1e-5):
+def classifier_multiclass_ecoc_discriminant (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,label_test_multiclass=label_testdat,lawidth=2.1,C=1,epsilon=1e-5):
    from shogun.Features import RealFeatures, MulticlassLabels
    from shogun.Classifier import LibLinear, L2R_L2LOSS_SVC, LinearMulticlassMachine
    from shogun.Classifier import ECOCStrategy, ECOCDiscriminantEncoder, ECOCHDDecoder
@@ -41,5 +41,5 @@ def classifier_multiclasslinearmachine_modular (fm_train_real=traindat,fm_test_r

if __name__=='__main__':
    print('MulticlassMachine')
-    classifier_multiclasslinearmachine_modular(*parameter_list[0])
+    classifier_multiclass_ecoc_discriminant(*parameter_list[0])

@@ -1,10 +1,10 @@
-import classifier_multiclass_shared
+from tools.multiclass_shared import prepare_data
# run with toy data
-[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data()
+[traindat, label_traindat, testdat, label_testdat] = prepare_data()

parameter_list = [[traindat,testdat,label_traindat,label_testdat,2.1,1,1e-5],[traindat,testdat,label_traindat,label_testdat,2.2,1,1e-5]]

-def classifier_multiclasslinearmachine_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,label_test_multiclass=label_testdat,lawidth=2.1,C=1,epsilon=1e-5):
+def classifier_multiclass_ecoc_ovr (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,label_test_multiclass=label_testdat,lawidth=2.1,C=1,epsilon=1e-5):
    from shogun.Features import RealFeatures, MulticlassLabels
    from shogun.Classifier import LibLinear, L2R_L2LOSS_SVC, LinearMulticlassMachine
    from shogun.Classifier import ECOCStrategy, ECOCOVREncoder, ECOCLLBDecoder, MulticlassOneVsRestStrategy
@@ -48,5 +48,5 @@ def classifier_multiclasslinearmachine_modular (fm_train_real=traindat,fm_test_r

if __name__=='__main__':
    print('MulticlassMachine')
-    classifier_multiclasslinearmachine_modular(*parameter_list[0])
+    classifier_multiclass_ecoc_ovr(*parameter_list[0])

@@ -21,7 +21,7 @@

parameter_list = [[traindat,testdat,label_traindat,label_testdat,2.1,1,1e-5],[traindat,testdat,label_traindat,label_testdat,2.2,1,1e-5]]

-def classifier_multiclasslinearmachine_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,label_test_multiclass=label_testdat,lawidth=2.1,C=1,epsilon=1e-5):
+def classifier_multiclass_ecoc_random (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,label_test_multiclass=label_testdat,lawidth=2.1,C=1,epsilon=1e-5):
    from shogun.Features import RealFeatures, MulticlassLabels
    from shogun.Classifier import LibLinear, L2R_L2LOSS_SVC, LinearMulticlassMachine
    from shogun.Classifier import ECOCStrategy, ECOCRandomSparseEncoder, ECOCRandomDenseEncoder, ECOCHDDecoder
@@ -61,5 +61,5 @@ def classifier_multiclasslinearmachine_modular (fm_train_real=traindat,fm_test_r

if __name__=='__main__':
    print('MulticlassMachine')
-    classifier_multiclasslinearmachine_modular(*parameter_list[0])
+    classifier_multiclass_ecoc_random(*parameter_list[0])
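
The renames in these multiclass ECOC examples make each entry point match its file name, which is presumably what the example-testing harness keys on (an assumption; the commit message does not say). Note that the `__main__` guard only runs the first parameter set; a hypothetical driver that exercises every set in one of these files would be:

for params in parameter_list:
    classifier_multiclass_ecoc_random(*params)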

@@ -1,6 +1,6 @@
-import classifier_multiclass_shared
+from tools.multiclass_shared import prepare_data

-[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data(True)
+[traindat, label_traindat, testdat, label_testdat] = prepare_data(True)

parameter_list = [[traindat,testdat,label_traindat,label_testdat,2.1,1,1e-5],[traindat,testdat,label_traindat,label_testdat,2.2,1,1e-5]]

@@ -1,6 +1,6 @@
-import classifier_multiclass_shared
+from tools.multiclass_shared import prepare_data

-[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data(False)
+[traindat, label_traindat, testdat, label_testdat] = prepare_data(False)

parameter_list = [[traindat,testdat,label_traindat,label_testdat,2.1,1,1e-5],[traindat,testdat,label_traindat,label_testdat,2.2,1,1e-5]]


This file was deleted.

@@ -1,6 +1,6 @@
-import classifier_multiclass_shared
+from tools.multiclass_shared import prepare_data

-[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data(False)
+[traindat, label_traindat, testdat, label_testdat] = prepare_data(False)

parameter_list = [[traindat,testdat,label_traindat,label_testdat,2.1,1,1e-5],[traindat,testdat,label_traindat,label_testdat,2.2,1,1e-5]]

@@ -1,6 +1,6 @@
-import classifier_multiclass_shared
+from tools.multiclass_shared import prepare_data

-[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data()
+[traindat, label_traindat, testdat, label_testdat] = prepare_data()

parameter_list = [[traindat,testdat,label_traindat,2.1,1,1e-5],[traindat,testdat,label_traindat,2.2,1,1e-5]]

@@ -1,6 +1,6 @@
-import classifier_multiclass_shared
+from tools.multiclass_shared import prepare_data

-[traindat, label_traindat, testdat, label_testdat] = classifier_multiclass_shared.prepare_data(False)
+[traindat, label_traindat, testdat, label_testdat] = prepare_data(False)

parameter_list = [[traindat,testdat,label_traindat,label_testdat,2.1,1,1e-5],[traindat,testdat,label_traindat,label_testdat,2.2,1,1e-5]]

