Commit 937f61b8 authored by Davis King's avatar Davis King

Reorganized the stuff on the algorithms page. I pulled the machine learning

and bayesian networking stuff out and gave them their own pages.

--HG--
extra : convert_revision : svn%3Afdd8eb12-d10e-0410-9acb-85c331704f74/trunk%402933
parent 0955ec45
...@@ -11,9 +11,9 @@ ...@@ -11,9 +11,9 @@
<p> <p>
This page documents library components that are all basically just implementations of This page documents library components that are all basically just implementations of
mathematical functions or algorithms without any really significant data structures mathematical functions or algorithms that don't fit in any of the other pages
associated with them. So this includes things like checksums, cryptographic hashes, of the dlib documentation. So this includes things like checksums, cryptographic hashes,
machine learning algorithms, sorting, etc... optimization, sorting, etc...
</p> </p>
</body> </body>
...@@ -28,8 +28,6 @@ ...@@ -28,8 +28,6 @@
<item>crc32</item> <item>crc32</item>
<item>rand</item> <item>rand</item>
<item>running_stats</item> <item>running_stats</item>
<item>vector_normalizer</item>
<item>mlp</item>
<item nolink="true"> <item nolink="true">
<name>Optimization</name> <name>Optimization</name>
<sub> <sub>
...@@ -65,34 +63,6 @@ ...@@ -65,34 +63,6 @@
<item>nearest_point</item> <item>nearest_point</item>
</sub> </sub>
</item> </item>
<item nolink="true">
<name>Bayes Utilities</name>
<sub>
<item>assignment</item>
<item>joint_probability_table</item>
<item>conditional_probability_table</item>
<item>bayes_node</item>
<item>bayesian_network_gibbs_sampler</item>
<item>bayesian_network_join_tree</item>
<item nolink="true">
<name>Bayes Node Utilities</name>
<sub>
<item>set_node_value</item>
<item>node_value</item>
<item>node_is_evidence</item>
<item>set_node_as_evidence</item>
<item>set_node_as_nonevidence</item>
<item>set_node_num_values</item>
<item>node_num_values</item>
<item>node_probability</item>
<item>set_node_probability</item>
<item>node_first_parent_assignment</item>
<item>node_next_parent_assignment</item>
<item>node_cpt_filled_out</item>
</sub>
</item>
</sub>
</item>
<item nolink="true"> <item nolink="true">
<name>Graph Utilities</name> <name>Graph Utilities</name>
<sub> <sub>
...@@ -111,54 +81,6 @@ ...@@ -111,54 +81,6 @@
<item>create_join_tree</item> <item>create_join_tree</item>
</sub> </sub>
</item> </item>
<item nolink="true">
<name>Kernel_Methods</name>
<sub>
<item nolink="true">
<name>Kernels</name>
<sub>
<item>radial_basis_kernel</item>
<item>polynomial_kernel</item>
<item>sigmoid_kernel</item>
<item>linear_kernel</item>
<item>offset_kernel</item>
</sub>
</item>
<item nolink="true">
<name>Function_Objects</name>
<sub>
<item>decision_function</item>
<item>distance_function</item>
<item>probabilistic_decision_function</item>
<item>normalized_function</item>
</sub>
</item>
<item>pick_initial_centers</item>
<item>krls</item>
<item>kcentroid</item>
<item>linearly_independent_subset_finder</item>
<item>kkmeans</item>
<item>svm_nu_trainer</item>
<item>rvm_trainer</item>
<item>rvm_regression_trainer</item>
<item>rbf_network_trainer</item>
<item>train_probabilistic_decision_function</item>
<item>cross_validate_trainer</item>
<item>test_binary_decision_function</item>
<item>cross_validate_trainer_threaded</item>
<item>rank_features</item>
<item>reduced_decision_function_trainer</item>
<item>reduced</item>
<item>reduced_decision_function_trainer2</item>
<item>reduced2</item>
<item>batch</item>
<item>verbose_batch</item>
<item>batch_trainer</item>
<item>svm_pegasos</item>
</sub>
</item>
<item>randomize_samples</item>
<item>is_binary_classification_problem</item>
<item>hsort_array</item> <item>hsort_array</item>
<item>isort_array</item> <item>isort_array</item>
<item>put_in_range</item> <item>put_in_range</item>
...@@ -186,83 +108,6 @@ ...@@ -186,83 +108,6 @@
<components> <components>
<!-- ************************************************************************* -->
<component>
<name>bayesian_network_join_tree</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object represents an implementation of the join tree algorithm
(a.k.a. the junction tree algorithm)
for inference in bayesian networks.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
<example>bayes_net_gui_ex.cpp.html</example>
<example>bayes_net_from_disk_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>bayesian_network_gibbs_sampler</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object performs Markov Chain Monte Carlo sampling of a bayesian
network using the Gibbs sampling technique.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>bayes_node</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object represents a node in a bayesian network. It is
intended to be used inside the <a href="containers.html#directed_graph">directed_graph</a> object to
represent bayesian networks.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>conditional_probability_table</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object represents a conditional probability table.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>joint_probability_table</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object represents a joint probability table.
</description>
</component>
<!-- ************************************************************************* --> <!-- ************************************************************************* -->
<component> <component>
...@@ -291,25 +136,6 @@ ...@@ -291,25 +136,6 @@
</component> </component>
<!-- ************************************************************************* -->
<component>
<name>pick_initial_centers</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kkmeans_abstract.h</spec_file>
<description>
This is a function that you can use to seed data clustering algorithms
like the <a href="#kkmeans">kkmeans</a> clustering method. What it
does is pick reasonable starting points for clustering by basically
trying to find a set of points that are all far away from each other.
</description>
<examples>
<example>kkmeans_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* --> <!-- ************************************************************************* -->
<component> <component>
...@@ -403,221 +229,6 @@ ...@@ -403,221 +229,6 @@
</component> </component>
<!-- ************************************************************************* -->
<component>
<name>assignment</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object models an assignment of random variables to particular values.
It is used with the <a href="#joint_probability_table">joint_probability_table</a> and
<a href="#conditional_probability_table">conditional_probability_table</a>
objects to represent assignments of various random variables to actual values.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_probability</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily set the probability of a
<a href="#bayes_node">bayes_node</a> given its parents when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_first_parent_assignment</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily obtain an <a href="#assignment">assignment</a>
that contains all the parents of a node in a bayesian network.
</description>
<examples>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_next_parent_assignment</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily loop through all the parent <a href="#assignment">assignments</a>
of a node in a bayesian network.
</description>
<examples>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_cpt_filled_out</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily verify that a node
in a bayesian network has its <a href="#conditional_probability_table">conditional_probability_table</a>
completely filled out.
</description>
<examples>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_probability</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily obtain the probability of a
<a href="#bayes_node">bayes_node</a> given its parents when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_num_values</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily obtain the number of values of a
<a href="#bayes_node">bayes_node</a> when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_num_values</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily set the number of values of a
<a href="#bayes_node">bayes_node</a> when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_as_nonevidence</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily remove the evidence flag of a
<a href="#bayes_node">bayes_node</a> when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_as_evidence</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily set the evidence flag of a
<a href="#bayes_node">bayes_node</a> when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_is_evidence</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily determine if a
<a href="#bayes_node">bayes_node</a> is evidence when it is inside
a <a href="containers.html#directed_graph"> directed_graph</a> object.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_value</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily obtain the value of a
<a href="#bayes_node">bayes_node</a> when it is inside a <a href="containers.html#directed_graph">
directed_graph</a> object.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_value</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily modify the value of a
<a href="#bayes_node">bayes_node</a> when it is inside a <a href="containers.html#directed_graph">
directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* --> <!-- ************************************************************************* -->
<component checked="true"> <component checked="true">
...@@ -779,45 +390,6 @@ ...@@ -779,45 +390,6 @@
</description> </description>
</component> </component>
<!-- ************************************************************************* -->
<component checked="true">
<name>mlp</name>
<file>dlib/mlp.h</file>
<spec_file>dlib/mlp/mlp_kernel_abstract.h</spec_file>
<description>
                   This object represents a multilayer perceptron network that is 
trained using the back propagation algorithm. The training algorithm also
incorporates the momentum method. That is, each round of back propagation
training also adds a fraction of the previous update. This fraction
is controlled by the momentum term set in the constructor.
</description>
<examples>
<example>mlp_ex.cpp.html</example>
</examples>
<implementations>
<implementation>
<name>mlp_kernel_1</name>
<file>dlib/mlp/mlp_kernel_1.h</file>
<description>
This is implemented in the obvious way.
</description>
<typedefs>
<typedef>
<name>kernel_1a</name>
<description>is a typedef for mlp_kernel_1</description>
</typedef>
</typedefs>
</implementation>
</implementations>
</component>
<!-- ************************************************************************* --> <!-- ************************************************************************* -->
<component> <component>
...@@ -843,663 +415,21 @@ ...@@ -843,663 +415,21 @@
<!-- ************************************************************************* --> <!-- ************************************************************************* -->
<component> <component>
<name>krls</name> <name>running_stats</name>
<file>dlib/svm.h</file> <file>dlib/statistics.h</file>
<spec_file link="true">dlib/svm/krls_abstract.h</spec_file> <spec_file link="true">dlib/statistics/statistics_abstract.h</spec_file>
<description>
This is an implementation of the kernel recursive least squares algorithm
described in the paper The Kernel Recursive Least Squares Algorithm by Yaakov Engel.
<p>
The long and short of this algorithm is that it is an online kernel based
regression algorithm. You give it samples (x,y) and it learns the function
f(x) == y. For a detailed description of the algorithm read the above paper.
</p>
</description>
<examples>
<example>krls_ex.cpp.html</example>
<example>krls_filter_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>svm_pegasos</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/pegasos_abstract.h</spec_file>
<description>
This object implements an online algorithm for training a support
vector machine for solving binary classification problems.
<p>
The implementation of the Pegasos algorithm used by this object is based
on the following excellent paper:
<blockquote>
Pegasos: Primal estimated sub-gradient solver for SVM (2007)
by Yoram Singer, Nathan Srebro
In ICML
</blockquote>
</p>
<p>
This SVM training algorithm has two interesting properties. First, the
pegasos algorithm itself converges to the solution in an amount of time
unrelated to the size of the training set (in addition to being quite fast
to begin with). This makes it an appropriate algorithm for learning from
very large datasets. Second, this object uses the <a href="#kcentroid">kcentroid</a> object
to maintain a sparse approximation of the learned decision function.
This means that the number of support vectors in the resulting decision
function is also unrelated to the size of the dataset (in normal SVM
training algorithms, the number of support vectors grows approximately
linearly with the size of the training set).
</p>
</description>
<examples>
<example>svm_pegasos_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>kkmeans</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kkmeans_abstract.h</spec_file>
<description> <description>
This is an implementation of a kernelized k-means clustering algorithm. This object represents something that can compute the running mean and
It performs k-means clustering by using the <a href="#kcentroid">kcentroid</a> object. variance of a stream of real numbers.
</description> </description>
<examples> <examples>
<example>kkmeans_ex.cpp.html</example> <example>kcentroid_ex.cpp.html</example>
</examples> </examples>
</component> </component>
<!-- ************************************************************************* -->
<component>
<name>vector_normalizer</name>
<file>dlib/statistics.h</file>
<spec_file link="true">dlib/statistics/statistics_abstract.h</spec_file>
<description>
This object represents something that can learn to normalize a set
of column vectors. In particular, normalized column vectors should
have zero mean and a variance of one.
Also, if desired, this object can use principal component
analysis for the purposes of reducing the number of elements in a
vector.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>running_stats</name>
<file>dlib/statistics.h</file>
<spec_file link="true">dlib/statistics/statistics_abstract.h</spec_file>
<description>
This object represents something that can compute the running mean and
variance of a stream of real numbers.
</description>
<examples>
<example>kcentroid_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>linearly_independent_subset_finder</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/linearly_independent_subset_finder_abstract.h</spec_file>
<description>
<p>
This is an implementation of an online algorithm for recursively finding a
set of linearly independent vectors in a kernel induced feature space. To
use it you decide how large you would like the set to be and then you feed it
sample points.
</p>
<p>
Each time you present it with a new sample point it either
keeps the current set of independent points unchanged, or if the new point
is "more linearly independent" than one of the points it already has,
it replaces the weakly linearly independent point with the new one.
</p>
<p>
This object uses the Approximately Linearly Dependent metric described in the paper
The Kernel Recursive Least Squares Algorithm by Yaakov Engel to decide which
points are more linearly independent than others.
</p>
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>kcentroid</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kcentroid_abstract.h</spec_file>
<description>
This object represents a weighted sum of sample points in a kernel induced
                feature space.  It can be used to kernelize any algorithm that requires only 
the ability to perform vector addition, subtraction, scalar multiplication,
and inner products.
<p>
An example use of this object is as an online algorithm for recursively estimating
the centroid of a sequence of training points. This object then allows you to
compute the distance between the centroid and any test points. So you can use
this object to predict how similar a test point is to the data this object has
been trained on (larger distances from the centroid indicate dissimilarity/anomalous
points).
</p>
<p>
The object internally keeps a set of "dictionary vectors"
that are used to represent the centroid. It manages these vectors using the
sparsification technique described in the paper The Kernel Recursive Least
Squares Algorithm by Yaakov Engel. This technique allows us to keep the
number of dictionary vectors down to a minimum. In fact, the object has a
user selectable tolerance parameter that controls the trade off between
accuracy and number of stored dictionary vectors.
</p>
</description>
<examples>
<example>kcentroid_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>train_probabilistic_decision_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
<p>
Trains a <a href="#probabilistic_decision_function">probabilistic_decision_function</a> using
some sort of batch trainer object such as the <a href="#svm_nu_trainer">svm_nu_trainer</a> or
<a href="#rbf_network_trainer">rbf_network_trainer</a>.
</p>
The probability model is created by using the technique described in the paper:
<blockquote>
Probabilistic Outputs for Support Vector Machines and
Comparisons to Regularized Likelihood Methods by
                        John C. Platt.  March 26, 1999 
</blockquote>
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>rbf_network_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/rbf_network_abstract.h</spec_file>
<description>
Trains a radial basis function network and outputs a <a href="#decision_function">decision_function</a>.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>rvm_regression_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/rvm_abstract.h</spec_file>
<description>
<p>
Trains a relevance vector machine for solving regression problems.
Outputs a <a href="#decision_function">decision_function</a> that represents the learned
regression function.
</p>
The implementation of the RVM training algorithm used by this library is based
on the following paper:
<blockquote>
Tipping, M. E. and A. C. Faul (2003). Fast marginal likelihood maximisation
for sparse Bayesian models. In C. M. Bishop and B. J. Frey (Eds.), Proceedings
of the Ninth International Workshop on Artificial Intelligence and Statistics,
Key West, FL, Jan 3-6.
</blockquote>
</description>
<examples>
<example>rvm_regression_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>rvm_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/rvm_abstract.h</spec_file>
<description>
<p>
Trains a relevance vector machine for solving binary classification problems.
Outputs a <a href="#decision_function">decision_function</a> that represents the learned classifier.
</p>
The implementation of the RVM training algorithm used by this library is based
on the following paper:
<blockquote>
Tipping, M. E. and A. C. Faul (2003). Fast marginal likelihood maximisation
for sparse Bayesian models. In C. M. Bishop and B. J. Frey (Eds.), Proceedings
of the Ninth International Workshop on Artificial Intelligence and Statistics,
Key West, FL, Jan 3-6.
</blockquote>
</description>
<examples>
<example>rvm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>svm_nu_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
<p>
Trains a nu support vector classifier and outputs a <a href="#decision_function">decision_function</a>.
</p>
The implementation of the nu-svm training algorithm used by this library is based
on the following excellent papers:
<ul>
<li>Chang and Lin, Training {nu}-Support Vector Classifiers: Theory and Algorithms</li>
<li>Chih-Chung Chang and Chih-Jen Lin, LIBSVM : a library for support vector
machines, 2001. Software available at
<a href="http://www.csie.ntu.edu.tw/~cjlin/libsvm">http://www.csie.ntu.edu.tw/~cjlin/libsvm</a></li>
</ul>
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>normalized_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/function_abstract.h</spec_file>
<description>
This object represents a container for another function
object and an instance of the <a href="#vector_normalizer">vector_normalizer</a> object.
It automatically normalizes all inputs before passing them
off to the contained function object.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>probabilistic_decision_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/function_abstract.h</spec_file>
<description>
This object represents a binary decision function for use with
support vector machines. It returns an
estimate of the probability that a given sample is in the +1 class.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>distance_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/function_abstract.h</spec_file>
<description>
This object represents a point in kernel induced feature space.
You may use this object to find the distance from the point it
represents to points in input space.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>decision_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/function_abstract.h</spec_file>
<description>
This object represents a decision or regression function that was
learned by a kernel based learning algorithm.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>offset_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a kernel with a fixed value offset
added to it.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>linear_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a linear function kernel for use with
kernel learning machines.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>sigmoid_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a sigmoid kernel for use with
kernel learning machines.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>polynomial_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a polynomial kernel for use with
kernel learning machines.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>radial_basis_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a radial basis function kernel for use with
kernel learning machines.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>is_binary_classification_problem</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
This function simply takes two vectors, the first containing feature vectors and
the second containing labels, and reports back if the two could possibly
contain data for a well formed classification problem.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>randomize_samples</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
Randomizes the order of samples in a column vector containing sample data.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>rank_features</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/feature_ranking_abstract.h</spec_file>
<description>
Finds a ranking of the top N (a user supplied parameter) features in a set of data
from a two class classification problem. It
does this by computing the distance between the centroids of both classes in kernel defined
feature space. Good features are then ones that result in the biggest separation between
the two centroids.
</description>
<examples>
<example>rank_features_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>batch</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/pegasos_abstract.h</spec_file>
<description>
This is a convenience function for creating
<a href="#batch_trainer">batch_trainer</a> objects.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>verbose_batch</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/pegasos_abstract.h</spec_file>
<description>
This is a convenience function for creating
<a href="#batch_trainer">batch_trainer</a> objects. This function
generates a batch_trainer that will print status messages to standard
output so that you can observe the progress of a training algorithm.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>batch_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/pegasos_abstract.h</spec_file>
<description>
This is a batch trainer object that is meant to wrap online trainer objects
that create <a href="#decision_function">decision_functions</a>. It
turns an online learning algorithm such as <a href="#svm_pegasos">svm_pegasos</a>
into a batch learning object. This allows you to use objects like
svm_pegasos with functions (e.g. <a href="#cross_validate_trainer">cross_validate_trainer</a>)
that expect batch mode training objects.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>reduced</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/reduced_abstract.h</spec_file>
<description>
This is a convenience function for creating
<a href="#reduced_decision_function_trainer">reduced_decision_function_trainer</a>
objects.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>reduced_decision_function_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/reduced_abstract.h</spec_file>
<description>
This is a batch trainer object that is meant to wrap other batch trainer objects
that create <a href="#decision_function">decision_function</a> objects.
It performs post processing on the output decision_function objects
with the intent of representing the decision_function with fewer
support vectors.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>reduced2</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/reduced_abstract.h</spec_file>
<description>
This is a convenience function for creating
<a href="#reduced_decision_function_trainer2">reduced_decision_function_trainer2</a>
objects.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>reduced_decision_function_trainer2</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/reduced_abstract.h</spec_file>
<description>
<p>
This is a batch trainer object that is meant to wrap other batch trainer objects
that create <a href="#decision_function">decision_function</a> objects.
It performs post processing on the output decision_function objects
with the intent of representing the decision_function with fewer
support vectors.
</p>
<p>
It begins by performing the same post processing as
the <a href="#reduced_decision_function_trainer">reduced_decision_function_trainer</a>
object but it also performs a global gradient based optimization
to further improve the results.
</p>
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>test_binary_decision_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
Tests a <a href="#decision_function">decision_function</a> that represents a binary decision function and
returns the test accuracy.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>cross_validate_trainer_threaded</name>
<file>dlib/svm_threaded.h</file>
<spec_file link="true">dlib/svm/svm_threaded_abstract.h</spec_file>
<description>
Performs k-fold cross validation on a user supplied trainer object such
as the <a href="#svm_nu_trainer">svm_nu_trainer</a> or <a href="#rbf_network_trainer">rbf_network_trainer</a>. This function does the same thing as <a href="#cross_validate_trainer">cross_validate_trainer</a>
except this function also allows you to specify how many threads of execution to use.
So you can use this function to take advantage of a multi-core system to perform
cross validation faster.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>cross_validate_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
Performs k-fold cross validation on a user supplied trainer object such
as the <a href="#svm_nu_trainer">svm_nu_trainer</a> or <a href="#rbf_network_trainer">rbf_network_trainer</a>.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* --> <!-- ************************************************************************* -->
<component> <component>
......
<?xml version="1.0" encoding="ISO-8859-1"?>
<?xml-stylesheet type="text/xsl" href="stylesheet.xsl"?>
<doc>
<title>Bayesian Networks</title>
<!-- ************************************************************************* -->
<body>
<br/><br/>
<p>
This page documents all the tools within the dlib library that relate
to the construction and evaluation of Bayesian networks. If you want
a quick introduction to the tools then you should consult the
Bayesian Net <a href="bayes_net_ex.cpp.html">example program</a>.
</p>
<p>
The
library also comes with a graphical application to assist in the
creation of bayesian networks. This application is one of the example
programs, so to use it you have to compile it yourself or download
one of the binaries from the sourceforge download page.
</p>
</body>
<!-- ************************************************************************* -->
<menu width="150">
<top>
<section>
<name>Tools</name>
<item>assignment</item>
<item>joint_probability_table</item>
<item>conditional_probability_table</item>
<item>bayes_node</item>
<item>bayesian_network_gibbs_sampler</item>
<item>bayesian_network_join_tree</item>
</section>
<section>
<name>Node Utilities</name>
<item>set_node_value</item>
<item>node_value</item>
<item>node_is_evidence</item>
<item>set_node_as_evidence</item>
<item>set_node_as_nonevidence</item>
<item>set_node_num_values</item>
<item>node_num_values</item>
<item>node_probability</item>
<item>set_node_probability</item>
<item>node_first_parent_assignment</item>
<item>node_next_parent_assignment</item>
<item>node_cpt_filled_out</item>
</section>
</top>
</menu>
<!-- ************************************************************************* -->
<!-- ************************************************************************* -->
<!-- ************************************************************************* -->
<components>
<!-- ************************************************************************* -->
<component>
<name>bayesian_network_join_tree</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object represents an implementation of the join tree algorithm
(a.k.a. the junction tree algorithm)
for inference in bayesian networks.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
<example>bayes_net_gui_ex.cpp.html</example>
<example>bayes_net_from_disk_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>bayesian_network_gibbs_sampler</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object performs Markov Chain Monte Carlo sampling of a bayesian
network using the Gibbs sampling technique.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>bayes_node</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object represents a node in a bayesian network. It is
intended to be used inside the <a href="containers.html#directed_graph">directed_graph</a> object to
represent bayesian networks.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>conditional_probability_table</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object represents a conditional probability table.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>joint_probability_table</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object represents a joint probability table.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>assignment</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This object models an assignment of random variables to particular values.
It is used with the <a href="#joint_probability_table">joint_probability_table</a> and
<a href="#conditional_probability_table">conditional_probability_table</a>
objects to represent assignments of various random variables to actual values.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_probability</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily set the probability of a
<a href="#bayes_node">bayes_node</a> given its parents when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_first_parent_assignment</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily obtain an <a href="#assignment">assignment</a>
that contains all the parents of a node in a bayesian network.
</description>
<examples>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_next_parent_assignment</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily loop through all the parent <a href="#assignment">assignments</a>
of a node in a bayesian network.
</description>
<examples>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_cpt_filled_out</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily verify that a node
in a bayesian network has its <a href="#conditional_probability_table">conditional_probability_table</a>
completely filled out.
</description>
<examples>
<example>bayes_net_gui_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_probability</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily obtain the probability of a
<a href="#bayes_node">bayes_node</a> given its parents when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_num_values</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily obtain the number of values of a
<a href="#bayes_node">bayes_node</a> when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_num_values</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily set the number of values of a
<a href="#bayes_node">bayes_node</a> when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_as_nonevidence</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily remove the evidence flag of a
<a href="#bayes_node">bayes_node</a> when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_as_evidence</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily set the evidence flag of a
<a href="#bayes_node">bayes_node</a> when it is inside
a <a href="containers.html#directed_graph">directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_is_evidence</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily determine if a
<a href="#bayes_node">bayes_node</a> is evidence when it is inside
a <a href="containers.html#directed_graph"> directed_graph</a> object.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>node_value</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily obtain the value of a
<a href="#bayes_node">bayes_node</a> when it is inside a <a href="containers.html#directed_graph">
directed_graph</a> object.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>set_node_value</name>
<file>dlib/bayes_utils.h</file>
<spec_file link="true">dlib/bayes_utils/bayes_utils_abstract.h</spec_file>
<description>
This is a function declared in the dlib::bayes_node_utils namespace. It
is a convenience function that allows you to easily modify the value of a
<a href="#bayes_node">bayes_node</a> when it is inside a <a href="containers.html#directed_graph">
directed_graph</a> object.
</description>
<examples>
<example>bayes_net_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
</components>
<!-- ************************************************************************* -->
</doc>
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
short section about the component you would like to use. Then pick which implementation you short section about the component you would like to use. Then pick which implementation you
would like and typedef it to something nice. Here is an example of creating a typedef for would like and typedef it to something nice. Here is an example of creating a typedef for
a set of integers using the first kernel implementation. <br/> a set of integers using the first kernel implementation. <br/>
<tt>typedef dlib::set&lt;int&gt;::kernel_1a set_of_ints;</tt> <tt>typedef dlib::set&lt;int&gt;::kernel_1a set_of_ints;</tt>. Or as another example,
if you wanted to make an expandable array of ints you might say <br/>
<tt>typedef dlib::array&lt;int&gt;::expand_1d_c array_of_ints;</tt>.
</p> </p>
......
...@@ -117,18 +117,22 @@ ...@@ -117,18 +117,22 @@
<li><b>Machine Learning Algorithms</b> <li><b>Machine Learning Algorithms</b>
<ul> <ul>
<li><a href="algorithms.html#mlp">Multi layer perceptrons</a> </li> <li><a href="ml.html#mlp">Multi layer perceptrons</a> </li>
<li><a href="algorithms.html#svm_nu_trainer">Support vector machines</a> for classification</li> <li><a href="ml.html#svm_nu_trainer">Support vector machines</a> for classification</li>
<li>Relevance vector machines for <a href="algorithms.html#rvm_trainer">classification</a> <li>Relevance vector machines for <a href="ml.html#rvm_trainer">classification</a>
and <a href="algorithms.html#rvm_regression_trainer">regression</a> </li> and <a href="ml.html#rvm_regression_trainer">regression</a> </li>
<li>An online <a href="algorithms.html#krls">kernel RLS regression</a> algorithm</li> <li>An online <a href="ml.html#krls">kernel RLS regression</a> algorithm</li>
<li>An online <a href="algorithms.html#svm_pegasos">SVM classification</a> algorithm</li> <li>An online <a href="ml.html#svm_pegasos">SVM classification</a> algorithm</li>
<li>An online kernelized <a href="algorithms.html#kcentroid">centroid estimator</a>/novelty detector</li> <li>An online kernelized <a href="ml.html#kcentroid">centroid estimator</a>/novelty detector</li>
<li>A kernelized <a href="algorithms.html#kkmeans">k-means</a> clustering algorithm</li> <li>A kernelized <a href="ml.html#kkmeans">k-means</a> clustering algorithm</li>
<li><a href="algorithms.html#rbf_network_trainer">Radial Basis Function Networks</a></li> <li><a href="ml.html#rbf_network_trainer">Radial Basis Function Networks</a></li>
<li>Bayesian network inference algorithms such as the </ul>
<a href="algorithms.html#bayesian_network_join_tree">join tree</a> algorithm and </li>
<a href="algorithms.html#bayesian_network_gibbs_sampler">gibbs sampler</a> markov chain monte carlo algorithm</li>
<li><b>Bayesian Network Inference Algorithms</b>
<ul>
<li><a href="bayes.html#bayesian_network_join_tree">join tree</a> algorithm for exact inference</li>
<li><a href="bayes.html#bayesian_network_gibbs_sampler">gibbs sampler</a> markov chain monte carlo algorithm</li>
</ul> </ul>
</li> </li>
......
...@@ -10,6 +10,16 @@ ...@@ -10,6 +10,16 @@
<link>algorithms.html</link> <link>algorithms.html</link>
<chm_sub>algorithms.xml</chm_sub> <chm_sub>algorithms.xml</chm_sub>
</item> </item>
<item>
<name>Machine Learning</name>
<link>ml.html</link>
<chm_sub>ml.xml</chm_sub>
</item>
<item>
<name>Bayesian Nets</name>
<link>bayes.html</link>
<chm_sub>bayes.xml</chm_sub>
</item>
<item> <item>
<name>Containers</name> <name>Containers</name>
<link>containers.html</link> <link>containers.html</link>
......
<?xml version="1.0" encoding="ISO-8859-1"?>
<?xml-stylesheet type="text/xsl" href="stylesheet.xsl"?>
<doc>
<title>Machine Learning</title>
<!-- ************************************************************************* -->
<body>
<br/><br/>
<p>
This page documents all the machine learning algorithms present in
the library. In particular, there are algorithms for performing
binary classification, regression, clustering, anomaly detection,
and feature ranking, as well as algorithms for doing more
specialized computations.
</p>
<p>
A good tutorial and introduction to the general concepts used by most of the
objects in this part of the library can be found in the <a href="svm_ex.cpp.html">svm example</a> program.
</p>
<p>
The major design goal of this portion of the library is to provide a highly modular and
simple architecture for dealing with kernel algorithms. Towards this end, dlib takes a generic
programming approach using C++ templates. In particular, each algorithm is parameterized
to allow a user to supply either one of the predefined dlib kernels (e.g. <a
href="#radial_basis_kernel">RBF</a> operating
on <a href="containers.html#matrix">column vectors</a>), or a new user defined kernel.
Moreover, the implementations of the algorithms are totally separated from the data on
which they operate. This makes the dlib implementation generic enough to operate on
any kind of data, be it column vectors, images, or some other form of structured data.
All that is necessary is an appropriate kernel.
</p>
</body>
<!-- ************************************************************************* -->
<menu width="150">
<top>
<section>
<name>Primary Algorithms</name>
<item>mlp</item>
<item>krls</item>
<item>kcentroid</item>
<item>linearly_independent_subset_finder</item>
<item>kkmeans</item>
<item>svm_nu_trainer</item>
<item>rvm_trainer</item>
<item>rvm_regression_trainer</item>
<item>rbf_network_trainer</item>
<item>rank_features</item>
<item>svm_pegasos</item>
</section>
<section>
<name>Trainer Adapters</name>
<item>train_probabilistic_decision_function</item>
<item>reduced_decision_function_trainer</item>
<item>reduced</item>
<item>reduced_decision_function_trainer2</item>
<item>reduced2</item>
<item>batch</item>
<item>verbose_batch</item>
<item>batch_trainer</item>
</section>
<section>
<name>Kernels</name>
<item>radial_basis_kernel</item>
<item>polynomial_kernel</item>
<item>sigmoid_kernel</item>
<item>linear_kernel</item>
<item>offset_kernel</item>
</section>
<section>
<name>Function Objects</name>
<item>decision_function</item>
<item>distance_function</item>
<item>probabilistic_decision_function</item>
<item>normalized_function</item>
</section>
<section>
<name>Miscellaneous</name>
<item>vector_normalizer</item>
<item>randomize_samples</item>
<item>is_binary_classification_problem</item>
<item>test_binary_decision_function</item>
<item>cross_validate_trainer</item>
<item>cross_validate_trainer_threaded</item>
<item>pick_initial_centers</item>
</section>
</top>
</menu>
<!-- ************************************************************************* -->
<!-- ************************************************************************* -->
<!-- ************************************************************************* -->
<components>
<!-- ************************************************************************* -->
<component>
<name>pick_initial_centers</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kkmeans_abstract.h</spec_file>
<description>
This is a function that you can use to seed data clustering algorithms
like the <a href="#kkmeans">kkmeans</a> clustering method. What it
does is pick reasonable starting points for clustering by basically
trying to find a set of points that are all far away from each other.
</description>
<examples>
<example>kkmeans_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component checked="true">
<name>mlp</name>
<file>dlib/mlp.h</file>
<spec_file>dlib/mlp/mlp_kernel_abstract.h</spec_file>
<description>
            <p>
               This object represents a multilayer perceptron network that is
               trained using the back propagation algorithm.  The training algorithm also 
               incorporates the momentum method.  That is, each round of back propagation 
               training also adds a fraction of the previous update.  This fraction
               is controlled by the momentum term set in the constructor.  
            </p>
            <p>
               It is worth noting that an MLP is, in general, very inferior to modern
               kernel algorithms such as the support vector machine.  So if you haven't
               tried any other techniques with your data you really should.
            </p>
</description>
<examples>
<example>mlp_ex.cpp.html</example>
</examples>
<implementations>
<implementation>
<name>mlp_kernel_1</name>
<file>dlib/mlp/mlp_kernel_1.h</file>
<description>
This is implemented in the obvious way.
</description>
<typedefs>
<typedef>
<name>kernel_1a</name>
<description>is a typedef for mlp_kernel_1</description>
</typedef>
</typedefs>
</implementation>
</implementations>
</component>
<!-- ************************************************************************* -->
<component>
<name>krls</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/krls_abstract.h</spec_file>
<description>
This is an implementation of the kernel recursive least squares algorithm
described in the paper The Kernel Recursive Least Squares Algorithm by Yaakov Engel.
<p>
The long and short of this algorithm is that it is an online kernel based
regression algorithm. You give it samples (x,y) and it learns the function
f(x) == y. For a detailed description of the algorithm read the above paper.
</p>
</description>
<examples>
<example>krls_ex.cpp.html</example>
<example>krls_filter_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>svm_pegasos</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/pegasos_abstract.h</spec_file>
<description>
This object implements an online algorithm for training a support
vector machine for solving binary classification problems.
<p>
The implementation of the Pegasos algorithm used by this object is based
on the following excellent paper:
                  <blockquote>
                     Pegasos: Primal estimated sub-gradient solver for SVM (2007)
                     by Shai Shalev-Shwartz, Yoram Singer, Nathan Srebro 
                     In ICML 
                  </blockquote>
</p>
<p>
This SVM training algorithm has two interesting properties. First, the
pegasos algorithm itself converges to the solution in an amount of time
unrelated to the size of the training set (in addition to being quite fast
to begin with). This makes it an appropriate algorithm for learning from
very large datasets. Second, this object uses the <a href="#kcentroid">kcentroid</a> object
to maintain a sparse approximation of the learned decision function.
This means that the number of support vectors in the resulting decision
function is also unrelated to the size of the dataset (in normal SVM
training algorithms, the number of support vectors grows approximately
linearly with the size of the training set).
</p>
</description>
<examples>
<example>svm_pegasos_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>kkmeans</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kkmeans_abstract.h</spec_file>
<description>
This is an implementation of a kernelized k-means clustering algorithm.
It performs k-means clustering by using the <a href="#kcentroid">kcentroid</a> object.
</description>
<examples>
<example>kkmeans_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>vector_normalizer</name>
<file>dlib/statistics.h</file>
<spec_file link="true">dlib/statistics/statistics_abstract.h</spec_file>
<description>
This object represents something that can learn to normalize a set
of column vectors. In particular, normalized column vectors should
have zero mean and a variance of one.
Also, if desired, this object can use principal component
analysis for the purposes of reducing the number of elements in a
vector.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>linearly_independent_subset_finder</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/linearly_independent_subset_finder_abstract.h</spec_file>
<description>
<p>
This is an implementation of an online algorithm for recursively finding a
set of linearly independent vectors in a kernel induced feature space. To
use it you decide how large you would like the set to be and then you feed it
sample points.
</p>
<p>
Each time you present it with a new sample point it either
keeps the current set of independent points unchanged, or if the new point
is "more linearly independent" than one of the points it already has,
it replaces the weakly linearly independent point with the new one.
</p>
<p>
This object uses the Approximately Linearly Dependent metric described in the paper
The Kernel Recursive Least Squares Algorithm by Yaakov Engel to decide which
points are more linearly independent than others.
</p>
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>kcentroid</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kcentroid_abstract.h</spec_file>
<description>
This object represents a weighted sum of sample points in a kernel induced
                        feature space.  It can be used to kernelize any algorithm that requires only
the ability to perform vector addition, subtraction, scalar multiplication,
and inner products.
<p>
An example use of this object is as an online algorithm for recursively estimating
the centroid of a sequence of training points. This object then allows you to
compute the distance between the centroid and any test points. So you can use
this object to predict how similar a test point is to the data this object has
been trained on (larger distances from the centroid indicate dissimilarity/anomalous
points).
</p>
<p>
The object internally keeps a set of "dictionary vectors"
that are used to represent the centroid. It manages these vectors using the
sparsification technique described in the paper The Kernel Recursive Least
Squares Algorithm by Yaakov Engel. This technique allows us to keep the
number of dictionary vectors down to a minimum. In fact, the object has a
user selectable tolerance parameter that controls the trade off between
accuracy and number of stored dictionary vectors.
</p>
</description>
<examples>
<example>kcentroid_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>train_probabilistic_decision_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
<p>
Trains a <a href="#probabilistic_decision_function">probabilistic_decision_function</a> using
some sort of batch trainer object such as the <a href="#svm_nu_trainer">svm_nu_trainer</a> or
<a href="#rbf_network_trainer">rbf_network_trainer</a>.
</p>
The probability model is created by using the technique described in the paper:
<blockquote>
Probabilistic Outputs for Support Vector Machines and
Comparisons to Regularized Likelihood Methods by
                            John C. Platt.  March 26, 1999
</blockquote>
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>rbf_network_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/rbf_network_abstract.h</spec_file>
<description>
Trains a radial basis function network and outputs a <a href="#decision_function">decision_function</a>.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>rvm_regression_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/rvm_abstract.h</spec_file>
<description>
<p>
Trains a relevance vector machine for solving regression problems.
Outputs a <a href="#decision_function">decision_function</a> that represents the learned
regression function.
</p>
The implementation of the RVM training algorithm used by this library is based
on the following paper:
<blockquote>
Tipping, M. E. and A. C. Faul (2003). Fast marginal likelihood maximisation
for sparse Bayesian models. In C. M. Bishop and B. J. Frey (Eds.), Proceedings
of the Ninth International Workshop on Artificial Intelligence and Statistics,
Key West, FL, Jan 3-6.
</blockquote>
</description>
<examples>
<example>rvm_regression_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>rvm_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/rvm_abstract.h</spec_file>
<description>
<p>
Trains a relevance vector machine for solving binary classification problems.
Outputs a <a href="#decision_function">decision_function</a> that represents the learned classifier.
</p>
The implementation of the RVM training algorithm used by this library is based
on the following paper:
<blockquote>
Tipping, M. E. and A. C. Faul (2003). Fast marginal likelihood maximisation
for sparse Bayesian models. In C. M. Bishop and B. J. Frey (Eds.), Proceedings
of the Ninth International Workshop on Artificial Intelligence and Statistics,
Key West, FL, Jan 3-6.
</blockquote>
</description>
<examples>
<example>rvm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>svm_nu_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
<p>
Trains a nu support vector classifier and outputs a <a href="#decision_function">decision_function</a>.
</p>
The implementation of the nu-svm training algorithm used by this library is based
on the following excellent papers:
<ul>
                            <li>Chang and Lin, Training nu-Support Vector Classifiers: Theory and Algorithms</li>
<li>Chih-Chung Chang and Chih-Jen Lin, LIBSVM : a library for support vector
machines, 2001. Software available at
<a href="http://www.csie.ntu.edu.tw/~cjlin/libsvm">http://www.csie.ntu.edu.tw/~cjlin/libsvm</a></li>
</ul>
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>normalized_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/function_abstract.h</spec_file>
<description>
This object represents a container for another function
object and an instance of the <a href="#vector_normalizer">vector_normalizer</a> object.
It automatically normalizes all inputs before passing them
off to the contained function object.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>probabilistic_decision_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/function_abstract.h</spec_file>
<description>
This object represents a binary decision function for use with
support vector machines. It returns an
estimate of the probability that a given sample is in the +1 class.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>distance_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/function_abstract.h</spec_file>
<description>
This object represents a point in kernel induced feature space.
You may use this object to find the distance from the point it
represents to points in input space.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>decision_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/function_abstract.h</spec_file>
<description>
This object represents a decision or regression function that was
learned by a kernel based learning algorithm.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>offset_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a kernel with a fixed value offset
added to it.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>linear_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a linear function kernel for use with
kernel learning machines.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>sigmoid_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a sigmoid kernel for use with
kernel learning machines.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>polynomial_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a polynomial kernel for use with
kernel learning machines.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>radial_basis_kernel</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/kernel_abstract.h</spec_file>
<description>
This object represents a radial basis function kernel for use with
kernel learning machines.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>is_binary_classification_problem</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
                        This function simply takes two vectors, the first containing feature vectors and
                        the second containing labels, and reports back if the two could possibly
                        contain data for a well-formed classification problem.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>randomize_samples</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
Randomizes the order of samples in a column vector containing sample data.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>rank_features</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/feature_ranking_abstract.h</spec_file>
<description>
Finds a ranking of the top N (a user supplied parameter) features in a set of data
from a two class classification problem. It
does this by computing the distance between the centroids of both classes in kernel defined
feature space. Good features are then ones that result in the biggest separation between
the two centroids.
</description>
<examples>
<example>rank_features_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>batch</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/pegasos_abstract.h</spec_file>
<description>
This is a convenience function for creating
<a href="#batch_trainer">batch_trainer</a> objects.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>verbose_batch</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/pegasos_abstract.h</spec_file>
<description>
This is a convenience function for creating
<a href="#batch_trainer">batch_trainer</a> objects. This function
generates a batch_trainer that will print status messages to standard
output so that you can observe the progress of a training algorithm.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>batch_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/pegasos_abstract.h</spec_file>
<description>
This is a batch trainer object that is meant to wrap online trainer objects
that create <a href="#decision_function">decision_functions</a>. It
turns an online learning algorithm such as <a href="#svm_pegasos">svm_pegasos</a>
into a batch learning object. This allows you to use objects like
svm_pegasos with functions (e.g. <a href="#cross_validate_trainer">cross_validate_trainer</a>)
that expect batch mode training objects.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>reduced</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/reduced_abstract.h</spec_file>
<description>
This is a convenience function for creating
<a href="#reduced_decision_function_trainer">reduced_decision_function_trainer</a>
objects.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>reduced_decision_function_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/reduced_abstract.h</spec_file>
<description>
This is a batch trainer object that is meant to wrap other batch trainer objects
that create <a href="#decision_function">decision_function</a> objects.
It performs post processing on the output decision_function objects
with the intent of representing the decision_function with fewer
support vectors.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>reduced2</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/reduced_abstract.h</spec_file>
<description>
This is a convenience function for creating
<a href="#reduced_decision_function_trainer2">reduced_decision_function_trainer2</a>
objects.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>reduced_decision_function_trainer2</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/reduced_abstract.h</spec_file>
<description>
<p>
This is a batch trainer object that is meant to wrap other batch trainer objects
that create <a href="#decision_function">decision_function</a> objects.
It performs post processing on the output decision_function objects
with the intent of representing the decision_function with fewer
support vectors.
</p>
<p>
It begins by performing the same post processing as
the <a href="#reduced_decision_function_trainer">reduced_decision_function_trainer</a>
object but it also performs a global gradient based optimization
to further improve the results.
</p>
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>test_binary_decision_function</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
Tests a <a href="#decision_function">decision_function</a> that represents a binary decision function and
returns the test accuracy.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>cross_validate_trainer_threaded</name>
<file>dlib/svm_threaded.h</file>
<spec_file link="true">dlib/svm/svm_threaded_abstract.h</spec_file>
<description>
Performs k-fold cross validation on a user supplied trainer object such
as the <a href="#svm_nu_trainer">svm_nu_trainer</a> or <a href="#rbf_network_trainer">rbf_network_trainer</a>.
This function does the same thing as <a href="#cross_validate_trainer">cross_validate_trainer</a>
except this function also allows you to specify how many threads of execution to use.
So you can use this function to take advantage of a multi-core system to perform
cross validation faster.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>cross_validate_trainer</name>
<file>dlib/svm.h</file>
<spec_file link="true">dlib/svm/svm_abstract.h</spec_file>
<description>
Performs k-fold cross validation on a user supplied trainer object such
as the <a href="#svm_nu_trainer">svm_nu_trainer</a> or <a href="#rbf_network_trainer">rbf_network_trainer</a>.
</description>
<examples>
<example>svm_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
</components>
<!-- ************************************************************************* -->
</doc>
...@@ -57,6 +57,7 @@ ...@@ -57,6 +57,7 @@
<item>toupper</item> <item>toupper</item>
<item>convert_utf8_to_utf32</item> <item>convert_utf8_to_utf32</item>
<item>is_combining_char</item> <item>is_combining_char</item>
<item>strings_equal_ignore_case</item>
</section> </section>
</top> </top>
</menu> </menu>
...@@ -216,6 +217,18 @@ ...@@ -216,6 +217,18 @@
</component> </component>
<!-- ************************************************************************* -->
<component>
<name>strings_equal_ignore_case</name>
<file>dlib/string.h</file>
<spec_file link="true">dlib/string/string_abstract.h</spec_file>
<description>
This is a pair of functions to do a case insensitive comparison between strings.
</description>
</component>
<!-- ************************************************************************* --> <!-- ************************************************************************* -->
<component> <component>
......
...@@ -355,27 +355,26 @@ ...@@ -355,27 +355,26 @@
<term link="algorithms.html#find_min_quasi_newton2" name="find_min_quasi_newton2"/> <term link="algorithms.html#find_min_quasi_newton2" name="find_min_quasi_newton2"/>
<term link="algorithms.html#find_min_conjugate_gradient2" name="find_min_conjugate_gradient2"/> <term link="algorithms.html#find_min_conjugate_gradient2" name="find_min_conjugate_gradient2"/>
<term link="bayes.html#set_node_value" name="set_node_value"/>
<term link="bayes.html#node_value" name="node_value"/>
<term link="bayes.html#node_is_evidence" name="node_is_evidence"/>
<term link="bayes.html#set_node_as_evidence" name="set_node_as_evidence"/>
<term link="bayes.html#set_node_as_nonevidence" name="set_node_as_nonevidence"/>
<term link="bayes.html#set_node_num_values" name="set_node_num_values"/>
<term link="bayes.html#node_num_values" name="node_num_values"/>
<term link="bayes.html#node_probability" name="node_probability"/>
<term link="bayes.html#set_node_probability" name="set_node_probability"/>
<term link="bayes.html#assignment" name="assignment"/>
<term link="bayes.html#joint_probability_table" name="joint_probability_table"/>
<term link="bayes.html#conditional_probability_table" name="conditional_probability_table"/>
<term link="bayes.html#bayes_node" name="bayes_node"/>
<term link="bayes.html#bayesian_network_gibbs_sampler" name="bayesian_network_gibbs_sampler"/>
<term link="bayes.html#bayesian_network_join_tree" name="bayesian_network_join_tree"/>
<term link="bayes.html#node_first_parent_assignment" name="node_first_parent_assignment"/>
<term link="bayes.html#node_next_parent_assignment" name="node_next_parent_assignment"/>
<term link="bayes.html#node_cpt_filled_out" name="node_cpt_filled_out"/>
<term link="algorithms.html#rectangle" name="rectangle"/> <term link="algorithms.html#rectangle" name="rectangle"/>
<term link="algorithms.html#set_node_value" name="set_node_value"/>
<term link="algorithms.html#node_value" name="node_value"/>
<term link="algorithms.html#node_is_evidence" name="node_is_evidence"/>
<term link="algorithms.html#set_node_as_evidence" name="set_node_as_evidence"/>
<term link="algorithms.html#set_node_as_nonevidence" name="set_node_as_nonevidence"/>
<term link="algorithms.html#set_node_num_values" name="set_node_num_values"/>
<term link="algorithms.html#node_num_values" name="node_num_values"/>
<term link="algorithms.html#node_probability" name="node_probability"/>
<term link="algorithms.html#set_node_probability" name="set_node_probability"/>
<term link="algorithms.html#assignment" name="assignment"/>
<term link="algorithms.html#joint_probability_table" name="joint_probability_table"/>
<term link="algorithms.html#conditional_probability_table" name="conditional_probability_table"/>
<term link="algorithms.html#bayes_node" name="bayes_node"/>
<term link="algorithms.html#bayesian_network_gibbs_sampler" name="bayesian_network_gibbs_sampler"/>
<term link="algorithms.html#bayesian_network_join_tree" name="bayesian_network_join_tree"/>
<term link="algorithms.html#node_first_parent_assignment" name="node_first_parent_assignment"/>
<term link="algorithms.html#node_next_parent_assignment" name="node_next_parent_assignment"/>
<term link="algorithms.html#node_cpt_filled_out" name="node_cpt_filled_out"/>
<term link="algorithms.html#edge" name="edge"/> <term link="algorithms.html#edge" name="edge"/>
<term link="algorithms.html#is_join_tree" name="is_join_tree"/> <term link="algorithms.html#is_join_tree" name="is_join_tree"/>
...@@ -391,9 +390,6 @@ ...@@ -391,9 +390,6 @@
<term link="algorithms.html#isort_array" name="isort_array"/> <term link="algorithms.html#isort_array" name="isort_array"/>
<term link="algorithms.html#md5" name="md5"/> <term link="algorithms.html#md5" name="md5"/>
<term link="algorithms.html#median" name="median"/> <term link="algorithms.html#median" name="median"/>
<term link="algorithms.html#mlp" name="mlp"/>
<term link="algorithms.html#mlp" name="multi-layer perceptron"/>
<term link="algorithms.html#mlp" name="neural network"/>
<term link="algorithms.html#qsort_array" name="qsort_array"/> <term link="algorithms.html#qsort_array" name="qsort_array"/>
<term link="algorithms.html#rand" name="rand"/> <term link="algorithms.html#rand" name="rand"/>
<term link="algorithms.html#rand" name="Mersenne Twister"/> <term link="algorithms.html#rand" name="Mersenne Twister"/>
...@@ -408,53 +404,55 @@ ...@@ -408,53 +404,55 @@
<term link="algorithms.html#graph_is_connected" name="graph_is_connected"/> <term link="algorithms.html#graph_is_connected" name="graph_is_connected"/>
<term link="algorithms.html#is_clique" name="is_clique"/> <term link="algorithms.html#is_clique" name="is_clique"/>
<term link="algorithms.html#is_maximal_clique" name="is_maximal_clique"/> <term link="algorithms.html#is_maximal_clique" name="is_maximal_clique"/>
<term link="algorithms.html#svm_pegasos" name="svm_pegasos"/>
<term link="algorithms.html#batch" name="batch"/>
<term link="algorithms.html#verbose_batch" name="verbose_batch"/>
<term link="algorithms.html#batch_trainer" name="batch_trainer"/>
<term link="algorithms.html#randomize_samples" name="randomize_samples"/>
<term link="algorithms.html#is_binary_classification_problem" name="is_binary_classification_problem"/>
<term link="algorithms.html#square_root" name="square_root"/> <term link="algorithms.html#square_root" name="square_root"/>
<term link="algorithms.html#svm_nu_trainer" name="svm_nu_trainer"/>
<term link="algorithms.html#rvm_trainer" name="rvm_trainer"/>
<term link="algorithms.html#rvm_regression_trainer" name="rvm_regression_trainer"/>
<term link="algorithms.html#rbf_network_trainer" name="rbf_network_trainer"/>
<term link="algorithms.html#reduced" name="reduced"/>
<term link="algorithms.html#reduced_decision_function_trainer" name="reduced_decision_function_trainer"/>
<term link="algorithms.html#reduced2" name="reduced2"/>
<term link="algorithms.html#reduced_decision_function_trainer2" name="reduced_decision_function_trainer2"/>
<term link="algorithms.html#train_probabilistic_decision_function" name="train_probabilistic_decision_function"/>
<term link="algorithms.html#cross_validate_trainer" name="cross_validate_trainer"/>
<term link="algorithms.html#cross_validate_trainer_threaded" name="cross_validate_trainer_threaded"/>
<term link="algorithms.html#test_binary_decision_function" name="test_binary_decision_function"/>
<term link="algorithms.html#svm_nu_trainer" name="support vector machine"/>
<term link="algorithms.html#rvm_trainer" name="relevance vector machine"/>
<term link="algorithms.html#vector" name="vector"/> <term link="algorithms.html#vector" name="vector"/>
<term link="algorithms.html#point" name="point"/> <term link="algorithms.html#point" name="point"/>
<term link="algorithms.html#krls" name="krls"/>
<term link="algorithms.html#kcentroid" name="kcentroid"/>
<term link="algorithms.html#linearly_independent_subset_finder" name="linearly_independent_subset_finder"/>
<term link="algorithms.html#running_stats" name="running_stats"/> <term link="algorithms.html#running_stats" name="running_stats"/>
<term link="algorithms.html#vector_normalizer" name="vector_normalizer"/>
<term link="algorithms.html#kkmeans" name="kkmeans"/>
<term link="algorithms.html#pick_initial_centers" name="pick_initial_centers"/>
<term link="algorithms.html#rank_features" name="rank_features"/>
<term link="ml.html#mlp" name="mlp"/>
<term link="ml.html#mlp" name="multi-layer perceptron"/>
<term link="ml.html#mlp" name="neural network"/>
<term link="ml.html#svm_pegasos" name="svm_pegasos"/>
<term link="ml.html#batch" name="batch"/>
<term link="ml.html#verbose_batch" name="verbose_batch"/>
<term link="ml.html#batch_trainer" name="batch_trainer"/>
<term link="ml.html#randomize_samples" name="randomize_samples"/>
<term link="ml.html#is_binary_classification_problem" name="is_binary_classification_problem"/>
<term link="ml.html#svm_nu_trainer" name="svm_nu_trainer"/>
<term link="ml.html#rvm_trainer" name="rvm_trainer"/>
<term link="ml.html#rvm_regression_trainer" name="rvm_regression_trainer"/>
<term link="ml.html#rbf_network_trainer" name="rbf_network_trainer"/>
<term link="ml.html#reduced" name="reduced"/>
<term link="ml.html#reduced_decision_function_trainer" name="reduced_decision_function_trainer"/>
<term link="ml.html#reduced2" name="reduced2"/>
<term link="ml.html#reduced_decision_function_trainer2" name="reduced_decision_function_trainer2"/>
<term link="ml.html#train_probabilistic_decision_function" name="train_probabilistic_decision_function"/>
<term link="ml.html#cross_validate_trainer" name="cross_validate_trainer"/>
<term link="ml.html#cross_validate_trainer_threaded" name="cross_validate_trainer_threaded"/>
<term link="ml.html#test_binary_decision_function" name="test_binary_decision_function"/>
<term link="ml.html#svm_nu_trainer" name="support vector machine"/>
<term link="ml.html#rvm_trainer" name="relevance vector machine"/>
<term link="ml.html#krls" name="krls"/>
<term link="ml.html#kcentroid" name="kcentroid"/>
<term link="ml.html#linearly_independent_subset_finder" name="linearly_independent_subset_finder"/>
<term link="ml.html#vector_normalizer" name="vector_normalizer"/>
<term link="ml.html#kkmeans" name="kkmeans"/>
<term link="ml.html#pick_initial_centers" name="pick_initial_centers"/>
<term link="ml.html#rank_features" name="rank_features"/>
<term link="dlib/svm/svm_abstract.h.html#maximum_nu" name="maximum_nu"/> <term link="dlib/svm/svm_abstract.h.html#maximum_nu" name="maximum_nu"/>
<term link="algorithms.html#decision_function" name="decision_function"/> <term link="ml.html#decision_function" name="decision_function"/>
<term link="algorithms.html#normalized_function" name="normalized_function"/> <term link="ml.html#normalized_function" name="normalized_function"/>
<term link="algorithms.html#distance_function" name="distance_function"/> <term link="ml.html#distance_function" name="distance_function"/>
<term link="algorithms.html#probabilistic_decision_function" name="probabilistic_decision_function"/> <term link="ml.html#probabilistic_decision_function" name="probabilistic_decision_function"/>
<term link="algorithms.html#linear_kernel" name="linear_kernel"/> <term link="ml.html#linear_kernel" name="linear_kernel"/>
<term link="algorithms.html#offset_kernel" name="offset_kernel"/> <term link="ml.html#offset_kernel" name="offset_kernel"/>
<term link="algorithms.html#polynomial_kernel" name="polynomial_kernel"/> <term link="ml.html#polynomial_kernel" name="polynomial_kernel"/>
<term link="algorithms.html#sigmoid_kernel" name="sigmoid_kernel"/> <term link="ml.html#sigmoid_kernel" name="sigmoid_kernel"/>
<term link="algorithms.html#radial_basis_kernel" name="radial_basis_kernel"/> <term link="ml.html#radial_basis_kernel" name="radial_basis_kernel"/>
<term link="dlib/svm/kernel_abstract.h.html#Kernel_Function_Objects" name="Kernel Function Objects"/> <term link="dlib/svm/kernel_abstract.h.html#Kernel_Function_Objects" name="Kernel Function Objects"/>
<term link="dlib/svm/kernel_abstract.h.html#kernel_derivative" name="kernel_derivative"/> <term link="dlib/svm/kernel_abstract.h.html#kernel_derivative" name="kernel_derivative"/>
...@@ -761,6 +759,7 @@ ...@@ -761,6 +759,7 @@
<term link="parsing.html#ustring" name="ustring"/> <term link="parsing.html#ustring" name="ustring"/>
<term link="parsing.html#convert_utf8_to_utf32" name="convert_utf8_to_utf32"/> <term link="parsing.html#convert_utf8_to_utf32" name="convert_utf8_to_utf32"/>
<term link="parsing.html#is_combining_char" name="is_combining_char"/> <term link="parsing.html#is_combining_char" name="is_combining_char"/>
<term link="parsing.html#strings_equal_ignore_case" name="strings_equal_ignore_case"/>
<term link="dlib/unicode/unicode_abstract.h.html#invalid_utf8_error" name="invalid_utf8_error"/> <term link="dlib/unicode/unicode_abstract.h.html#invalid_utf8_error" name="invalid_utf8_error"/>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment