Commit 706b343d authored by Davis King's avatar Davis King

Changed the example program to suggest a logarithmic grid search

rather than one that takes linear steps.

--HG--
extra : convert_revision : svn%3Afdd8eb12-d10e-0410-9acb-85c331704f74/trunk%404010
parent 58862bb2
...@@ -106,9 +106,9 @@ int main() ...@@ -106,9 +106,9 @@ int main()
// should look at the model_selection_ex.cpp program for examples of more sophisticated // should look at the model_selection_ex.cpp program for examples of more sophisticated
// strategies for determining good parameter choices. // strategies for determining good parameter choices.
cout << "doing cross validation" << endl; cout << "doing cross validation" << endl;
for (double gamma = 0.00001; gamma <= 1; gamma += 0.1) for (double gamma = 0.00001; gamma <= 1; gamma *= 5)
{ {
for (double nu = 0.00001; nu < max_nu; nu += 0.1) for (double nu = 0.00001; nu < max_nu; nu *= 5)
{ {
// tell the trainer the parameters we want to use // tell the trainer the parameters we want to use
trainer.set_kernel(kernel_type(gamma)); trainer.set_kernel(kernel_type(gamma));
...@@ -125,13 +125,13 @@ int main() ...@@ -125,13 +125,13 @@ int main()
// From looking at the output of the above loop it turns out that a good value for // From looking at the output of the above loop it turns out that a good value for
// nu and gamma for this problem is 0.1 for both. So that is what we will use. // nu and gamma for this problem is 0.15625 for both. So that is what we will use.
// Now we train on the full set of data and obtain the resulting decision function. We use the // Now we train on the full set of data and obtain the resulting decision function. We use the
// value of 0.1 for nu and gamma. The decision function will return values >= 0 for samples it predicts // value of 0.15625 for nu and gamma. The decision function will return values >= 0 for samples it predicts
// are in the +1 class and numbers < 0 for samples it predicts to be in the -1 class. // are in the +1 class and numbers < 0 for samples it predicts to be in the -1 class.
trainer.set_kernel(kernel_type(0.1)); trainer.set_kernel(kernel_type(0.15625));
trainer.set_nu(0.1); trainer.set_nu(0.15625);
typedef decision_function<kernel_type> dec_funct_type; typedef decision_function<kernel_type> dec_funct_type;
typedef normalized_function<dec_funct_type> funct_type; typedef normalized_function<dec_funct_type> funct_type;
...@@ -219,7 +219,7 @@ int main() ...@@ -219,7 +219,7 @@ int main()
// Lastly, note that the decision functions we trained above involved well over 100 // Lastly, note that the decision functions we trained above involved well over 200
// basis vectors. Support vector machines in general tend to find decision functions // basis vectors. Support vector machines in general tend to find decision functions
// that involve a lot of basis vectors. This is significant because the more // that involve a lot of basis vectors. This is significant because the more
// basis vectors in a decision function, the longer it takes to classify new examples. // basis vectors in a decision function, the longer it takes to classify new examples.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment