Commit f7153a8c authored by Davis King's avatar Davis King

updated docs

--HG--
extra : convert_revision : svn%3Afdd8eb12-d10e-0410-9acb-85c331704f74/trunk%403765
parent 1d204e79
......@@ -119,6 +119,8 @@
<ul>
<li><a href="ml.html#mlp">Multi layer perceptrons</a> </li>
<li><a href="ml.html#svm_nu_trainer">Support vector machines</a> for classification</li>
<li>Reduced-rank methods for large-scale <a href="ml.html#svm_c_ekm_trainer">classification</a>
and <a href="ml.html#krr_trainer">regression</a></li>
<li>Relevance vector machines for <a href="ml.html#rvm_trainer">classification</a>
and <a href="ml.html#rvm_regression_trainer">regression</a> </li>
<li>An online <a href="ml.html#krls">kernel RLS regression</a> algorithm</li>
......
......@@ -175,6 +175,14 @@
<name>Relevance_Vector_Classification</name>
<link>rvm_ex.cpp.html</link>
</item>
<item>
<name>Kernel_Ridge_Regression</name>
<link>krr_regression_ex.cpp.html</link>
</item>
<item>
<name>KRR_Classification</name>
<link>krr_classification_ex.cpp.html</link>
</item>
<item>
<name>Support_Vector_Machine</name>
<link>svm_ex.cpp.html</link>
......
......@@ -74,6 +74,7 @@ Davis E. King. <a href="http://www.jmlr.org/papers/volume10/king09a/king09a.pdf"
<item>svm_c_linear_trainer</item>
<item>svm_c_ekm_trainer</item>
<item>rvm_trainer</item>
<item>krr_trainer</item>
<item>rvm_regression_trainer</item>
<item>rbf_network_trainer</item>
<item>rank_features</item>
......@@ -867,6 +868,29 @@ Davis E. King. <a href="http://www.jmlr.org/papers/volume10/king09a/king09a.pdf"
</component>
<!-- ************************************************************************* -->
<component>
<name>krr_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/krr_trainer_abstract.h</spec_file>
<description>
<p>
Performs kernel ridge regression and outputs a <a href="#decision_function">decision_function</a> that
represents the learned function.
</p>
The implementation uses the <a href="#empirical_kernel_map">empirical_kernel_map</a> and
<a href="#linearly_independent_subset_finder">linearly_independent_subset_finder</a>,
which allows the algorithm to run on large datasets and produce sparse outputs. It is also
capable of automatically estimating its regularization parameter using leave-one-out cross-validation.
</description>
<examples>
<example>krr_regression_ex.cpp.html</example>
<example>krr_classification_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
......
......@@ -155,6 +155,7 @@
<term file="ml.html" name="svm_c_linear_trainer"/>
<term file="ml.html" name="svm_c_ekm_trainer"/>
<term file="ml.html" name="rvm_trainer"/>
<term file="ml.html" name="krr_trainer"/>
<term file="ml.html" name="rvm_regression_trainer"/>
<term file="ml.html" name="rbf_network_trainer"/>
<term file="ml.html" name="reduced"/>
......@@ -167,6 +168,9 @@
<term file="ml.html" name="test_binary_decision_function"/>
<term link="ml.html#svm_nu_trainer" name="support vector machine"/>
<term link="ml.html#rvm_trainer" name="relevance vector machine"/>
<term link="ml.html#krr_trainer" name="kernel ridge regression"/>
<term link="ml.html#krr_trainer" name="regularized least squares"/>
<term link="ml.html#krr_trainer" name="least squares SVM"/>
<term file="ml.html" name="krls"/>
<term file="ml.html" name="kcentroid"/>
<term file="ml.html" name="linearly_independent_subset_finder"/>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment