Skip to content

Commit

Permalink
fix remaining examples
Browse files Browse the repository at this point in the history
  • Loading branch information
Soeren Sonnenburg committed Aug 31, 2011
1 parent ffb7c54 commit b17aeae
Show file tree
Hide file tree
Showing 3 changed files with 45 additions and 44 deletions.
Expand Up @@ -69,7 +69,7 @@ def modelselection_grid_search_linear_modular(traindat=traindat, label_traindat=

# print best parameters
#print "best parameters:"
best_parameters.print_tree()
#best_parameters.print_tree()

# apply them and print result
best_parameters.apply_to_machine(classifier)
Expand All @@ -78,4 +78,4 @@ def modelselection_grid_search_linear_modular(traindat=traindat, label_traindat=

# Script entry point (Python 2 `print' statement syntax).
# NOTE(review): this span fuses the pre- and post-commit __main__ bodies
# from a diff view -- modelselection_grid_search_simple is the old
# (removed) call, modelselection_grid_search_linear_modular the renamed
# replacement -- and the body lines lost their indentation in
# extraction. TODO confirm against the real file before relying on it.
if __name__=='__main__':
print 'ModelSelection GridSearchLinear'
modelselection_grid_search_simple(*parameter_list[0])
modelselection_grid_search_linear_modular(*parameter_list[0])
Expand Up @@ -4,46 +4,41 @@
from modshogun import StreamingVwFeatures
from modshogun import VowpalWabbit

def create_cache():
    """Build a binary cache file from an ascii input data file.

    Streams ../data/fm_train_sparsereal.dat (SVMLight format) through a
    non-training VW pass, which writes the default cache file,
    vw_cache.dat.cache, as a side effect.
    """
    # Wrap the plain-text input in a StreamingVwFile; enabling the
    # cache flag makes the sweep below emit vw_cache.dat.cache.
    ascii_input = StreamingVwFile("../data/fm_train_sparsereal.dat")
    ascii_input.set_write_to_cache(True)

    # Declare the on-disk format; T_DENSE, T_SVMLIGHT and T_VW are the
    # supported parser types.
    ascii_input.set_parser_type(T_SVMLIGHT)

    # `True' flags the stream as labelled; 1024 is the buffer size.
    feats = StreamingVwFeatures(ascii_input, True, 1024)

    # Disable learning so train() merely iterates over every example --
    # that iteration is what triggers the cache write.
    vw = VowpalWabbit(feats)
    vw.set_no_training(True)
    vw.train()

def train_from_cache():
    """Train VW using the previously generated cache file."""
    # A cache file is opened through StreamingVwCacheFile rather than
    # StreamingVwFile; everything downstream is identical to the
    # plain-text path.
    cache_input = StreamingVwCacheFile("vw_cache.dat.cache")
    feats = StreamingVwFeatures(cache_input, True, 1024)
    vw = VowpalWabbit(feats)
    vw.train()
# Argument sets for the example entry point below; set 0 is used when
# this file is run as a script.
parameter_list=[['../data/fm_train_sparsereal.dat']]

def streaming_vw_createcache_modular(fname):
    """Create a binary cache from the ascii data file `fname', then
    train from that cache via StreamingVwCacheFile and return the
    trained VowpalWabbit object."""

    # ---- pass 1: write the cache -----------------------------------
    # Wrap the ascii file and request that the default cache file
    # (vw_cache.dat.cache) be written while streaming.
    ascii_file = StreamingVwFile(fname)
    ascii_file.set_write_to_cache(True)

    # The input is SVMLight formatted; T_DENSE, T_SVMLIGHT and T_VW are
    # the supported parser types.
    ascii_file.set_parser_type(T_SVMLIGHT)

    # `True' marks the examples as labelled; 1024 is the buffer size.
    feats = StreamingVwFeatures(ascii_file, True, 1024)

    # With training disabled, train() just sweeps over all examples --
    # that sweep is what produces the cache file.
    vw = VowpalWabbit(feats)
    vw.set_no_training(True)
    vw.train()

    # ---- pass 2: train from the generated cache --------------------
    # Open the cache; the remaining steps mirror the plain-text path.
    cache_file = StreamingVwCacheFile("vw_cache.dat.cache")
    feats = StreamingVwFeatures(cache_file, True, 1024)
    vw = VowpalWabbit(feats)
    vw.train()
    return vw

# Script entry point (Python 2 `print' statement syntax).
# NOTE(review): this span fuses the pre- and post-commit __main__
# bodies from a diff view -- the create_cache()/train_from_cache()
# calls belong to the old (removed) version, and the final
# streaming_vw_createcache_modular(...) call is the replacement; the
# body lines also lost their indentation in extraction. TODO confirm
# against the real file before relying on this block.
if __name__ == "__main__":
print "Creating cache..."
create_cache()
print "Done."
print

print "Training using the cache file..."
print
train_from_cache()

streaming_vw_createcache_modular(*parameter_list[0])
8 changes: 7 additions & 1 deletion src/shogun/classifier/vw/VowpalWabbit.cpp
Expand Up @@ -219,7 +219,7 @@ void CVowpalWabbit::init(CStreamingVwFeatures* feat)
SG_REF(env);
SG_REF(reg);

quiet = false;
quiet = true;
no_training = false;
dump_interval = exp(1.);
sum_loss_since_last_dump = 0.;
Expand Down Expand Up @@ -373,6 +373,12 @@ void CVowpalWabbit::output_prediction(int32_t f, float32_t res, float32_t weight
}
}

/** Enable or disable progress/diagnostic output.
 *
 * @param verbose true to print diagnostics, false to stay quiet
 *                (sets the member flag `quiet` to the opposite value)
 */
void CVowpalWabbit::set_verbose(bool verbose)
{
	quiet = !verbose;
}

float32_t CVowpalWabbit::compute_exact_norm(VwExample* &ex, float32_t& sum_abs_x)
{
// We must traverse the features in _precisely_ the same order as during training.
Expand Down

0 comments on commit b17aeae

Please sign in to comment.