Commit 02c27ff9 authored by Davis King

fixed formatting

parent 2e39e5fa
@@ -63,8 +63,8 @@ using net_type = loss_multiclass_log<fc<number_of_classes,
                             repeat<9,res, // repeat this layer 9 times
                             res_down<
                             res<
-                            input<matrix<unsigned char>
-                            >>>>>>>>>>>;
+                            input<matrix<unsigned char>>
+                            >>>>>>>>>>;
 // And finally, let's define a residual network building block that uses
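The change in all three hunks is the same one-character move: the pair of closing brackets that ends matrix<unsigned char> and input<...> now sits on the input line itself, so the trailing run of closing brackets needs one fewer character. As context, here is a sketch of how this first definition reads after the fix; the lines that fall outside the hunk (avg_pool_everything and the res/res_down chain under fc) are reconstructed from dlib's dnn_introduction2_ex.cpp rather than shown in this diff, so treat them as an approximation:

    // Sketch of the full definition after this commit (lines outside the hunk
    // are reconstructed, not part of the diff).
    using net_type = loss_multiclass_log<fc<number_of_classes,
                                avg_pool_everything<
                                res<res<res<res_down<
                                repeat<9,res, // repeat this layer 9 times
                                res_down<
                                res<
                                input<matrix<unsigned char>>
                                >>>>>>>>>>;

The total number of closing brackets is unchanged; only the line carrying the last one moves, which is why the commit is purely a formatting fix.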
@@ -116,8 +116,8 @@ int main(int argc, char** argv) try
                                 tag4<repeat<9,pres, // 9 groups, each containing 2 prelu layers
                                 res_down<
                                 res<
-                                input<matrix<unsigned char>
-                                >>>>>>>>>>>>;
+                                input<matrix<unsigned char>>
+                                >>>>>>>>>>>;
     // prelu layers have a floating point parameter. If you want to set it to
     // something other than its default value you can do so like this:
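The code that comment introduces is cut off by the hunk. As a rough sketch of the pattern it refers to, dlib lets you pass layer constructor arguments when the network object is built, so non-default prelu parameters can be supplied at construction time. The variable name pnet, the network type name net_type2, the numeric values, and the use of repeat_group to reach the prelu instances inside the repeat block are taken from the corresponding dlib example and should be checked against the actual file:

    // Sketch (not part of this diff): constructor arguments set each prelu
    // layer's initial parameter; the values here are illustrative.
    net_type2 pnet(prelu_(0.2),   // outermost prelu layer
                   prelu_(0.25),  // next prelu layer
                   repeat_group(prelu_(0.3), prelu_(0.4)) // prelu instances inside the
                                                          // repeat<9,pres,...> block; all
                                                          // 9 repetitions get these values
                   );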
@@ -251,8 +251,8 @@ int main(int argc, char** argv) try
                                 repeat<9,res,
                                 ares_down<
                                 ares<
-                                input<matrix<unsigned char>
-                                >>>>>>>>>>>;
+                                input<matrix<unsigned char>>
+                                >>>>>>>>>>;
     // Then we can simply assign our trained net to our testing net.
     test_net_type tnet = net;
     // Or if you only had a file with your trained network you could deserialize
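The deserialization alternative mentioned in that last comment is truncated by the hunk; a minimal sketch of what it looks like with dlib's stream-style deserialize follows. The file name is only a placeholder, not taken from the example:

    // Sketch (not part of this diff): load the trained weights from disk
    // instead of copying them from the training object.
    test_net_type tnet2;
    deserialize("trained_network.dat") >> tnet2;  // placeholder file name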
...