Commit c01632ee authored by Davis King

Added a try/catch block around the code in main().

--HG--
extra : convert_revision : svn%3Afdd8eb12-d10e-0410-9acb-85c331704f74/trunk%403299
parent 6b3548b2
@@ -111,119 +111,126 @@ private:
int main()
{
    try
    {
        // make a column vector of length 2
        column_vector starting_point;
        starting_point.set_size(2);

        cout << "Find the minimum of the rosen function()" << endl;

        // Set the starting point to (4,8).  This is the point the optimization algorithm
        // will start out from and it will move it closer and closer to the function's
        // minimum point.  So generally you want to try and compute a good guess that is
        // somewhat near the actual optimum value.
        starting_point = 4, 8;

        // Now we use the find_min() function to find the minimum point.  The first argument
        // to this routine is the search strategy we want to use.  The second argument is the
        // stopping strategy.  Below I'm using the objective_delta_stop_strategy() which just
        // says that the search should stop when the change in the function being optimized
        // is small enough.

        // The other arguments to find_min() are the function to be minimized, its derivative,
        // then the starting point, and the last is an acceptable minimum value of the rosen()
        // function.  That is, if the algorithm finds any inputs to rosen() that give an output
        // value <= -1 then it will stop immediately.  Usually you supply a number smaller than
        // the actual global minimum.  So since the smallest output of the rosen function is 0
        // we just put -1 here which effectively causes this last argument to be disregarded.

        find_min(bfgs_search_strategy(),  // Use the BFGS search algorithm
                 objective_delta_stop_strategy(1e-7),  // Stop when the change in rosen() is less than 1e-7
                 &rosen, &rosen_derivative, starting_point, -1);
        // Once the function ends the starting_point vector will contain the optimum point
        // of (1,1).
        cout << starting_point << endl;


        // Now let's try doing it again with a different starting point and the version
        // of find_min() that doesn't require you to supply a derivative function.
        // This version will compute a numerical approximation of the derivative since
        // we didn't supply one to it.
        starting_point = -94, 5.2;
        find_min_using_approximate_derivatives(bfgs_search_strategy(),
                                               objective_delta_stop_strategy(1e-7),
                                               &rosen, starting_point, -1);
        // Again the correct minimum point is found and stored in starting_point
        cout << starting_point << endl;


        // Here we repeat the same thing as above but this time using the L-BFGS
        // algorithm.  L-BFGS is very similar to the BFGS algorithm; however, BFGS
        // uses O(N^2) memory where N is the size of the starting_point vector.
        // The L-BFGS algorithm uses only O(N) memory.  So if you have a
        // function of a huge number of variables the L-BFGS algorithm is probably
        // a better choice.
        starting_point = 4, 8;
        find_min(lbfgs_search_strategy(10),  // The 10 here is basically a measure of how much memory L-BFGS will use.
                 objective_delta_stop_strategy(1e-7),
                 &rosen, &rosen_derivative, starting_point, -1);

        cout << starting_point << endl;

        starting_point = -94, 5.2;
        find_min_using_approximate_derivatives(lbfgs_search_strategy(10),
                                               objective_delta_stop_strategy(1e-7),
                                               &rosen, starting_point, -1);
        cout << starting_point << endl;


        // Now let's look at using the test_function object with the optimization
        // functions.
        cout << "\nFind the minimum of the test_function" << endl;

        column_vector target;
        target.set_size(4);
        starting_point.set_size(4);

        // This variable will be used as the target of the test_function.  So,
        // our simple test_function object will have a global minimum at the
        // point given by the target.  We will then use the optimization
        // routines to find this minimum value.
        target = 3, 5, 1, 7;

        // set the starting point far from the global minimum
        starting_point = 1, 2, 3, 4;
        find_min_using_approximate_derivatives(bfgs_search_strategy(),
                                               objective_delta_stop_strategy(1e-7),
                                               test_function(target), starting_point, -1);
        // At this point the correct value of (3,5,1,7) should be found and stored in starting_point
        cout << starting_point << endl;

        // Now let's try it again with the conjugate gradient algorithm.
        starting_point = -4, 5, 99, 3;
        find_min_using_approximate_derivatives(cg_search_strategy(),
                                               objective_delta_stop_strategy(1e-7),
                                               test_function(target), starting_point, -1);
        cout << starting_point << endl;


        // Finally, let's try the BOBYQA algorithm.  This is a technique specially
        // designed to minimize a function in the absence of derivative information.
        // Generally speaking, it is the method of choice if derivatives are not available.
        starting_point = -4, 5, 99, 3;
        find_min_bobyqa(test_function(target),
                        starting_point,
                        9,    // number of interpolation points
                        uniform_matrix<double>(4,1, -1e100),  // lower bound constraint
                        uniform_matrix<double>(4,1, 1e100),   // upper bound constraint
                        10,    // initial trust region radius
                        1e-6,  // stopping trust region radius
                        100    // max number of objective function evaluations
                        );
        cout << starting_point << endl;
    }
    catch (std::exception& e)
    {
        cout << e.what() << endl;
    }
}
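The hunk starts at line 111 of the example file, so the definitions main() relies on (the column_vector typedef, rosen(), rosen_derivative(), and the test_function class, whose private: section appears as context in the hunk header) fall outside this diff. For reference, here is a minimal self-contained sketch of those definitions; the names and signatures match the calls above, but the bodies are reconstructions following the usual Rosenbrock-function conventions, not the file's verbatim contents:

#include <dlib/optimization.h>
#include <cmath>
#include <iostream>

using namespace std;
using namespace dlib;

// A dynamically sized column vector of doubles.
typedef matrix<double,0,1> column_vector;

// The Rosenbrock test function: f(x,y) = 100*(y - x^2)^2 + (1 - x)^2.
// Its global minimum of 0 is at the point (1,1).
double rosen (const column_vector& m)
{
    const double x = m(0);
    const double y = m(1);
    return 100.0*pow(y - x*x, 2) + pow(1 - x, 2);
}

// The analytic gradient of rosen(), passed to the find_min() calls above.
const column_vector rosen_derivative (const column_vector& m)
{
    const double x = m(0);
    const double y = m(1);
    column_vector res(2);
    res(0) = -400.0*x*(y - x*x) - 2.0*(1 - x);  // partial derivative with respect to x
    res(1) = 200.0*(y - x*x);                   // partial derivative with respect to y
    return res;
}

// A function object whose global minimum of 0 sits at a caller-chosen
// target point, used to exercise the derivative-free routines above.
class test_function
{
public:
    test_function (const column_vector& input) : target(input) {}

    double operator() (const column_vector& arg) const
    {
        // Return the mean squared difference between arg and the target.
        return mean(squared(target - arg));
    }

private:
    column_vector target;
};

With those definitions in place the example uses only header-only parts of dlib, so it should build with something like g++ -I <path-to-dlib-root> optimization_ex.cpp; the exact include path depends on your checkout.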