Commit 87b182b5, authored Nov 04, 2015 by Davis King
merged

Parents: 24698f87, 92073c20
Showing 2 changed files with 48 additions and 35 deletions:

    dlib/dnn/core.h           +29  -34
    dlib/dnn/core_abstract.h  +19   -1
dlib/dnn/core.h
@@ -7,6 +7,7 @@
 #include "tensor.h"
 #include <iterator>
 #include <memory>
+#include <sstream>
 #include <type_traits>
 #include "../statistics.h"
 #include "../rand.h"
@@ -1542,23 +1543,29 @@ namespace dlib
             resizable_tensor gradient_input;
         };
     }

-    // TODO, remove?
-    inline void print_tensor (
-        const tensor& a
-    )
-    {
-        auto data = a.host();
-        for (size_t i = 0; i < a.size(); ++i)
-            std::cout << data[i] << " ";
-        std::cout << std::endl;
-    }
+    struct layer_test_results
+    {
+        layer_test_results() : was_good(true) {}
+        explicit layer_test_results(const std::string& l) : log(l), was_good(false) {}
+
+        std::string log;
+        bool was_good;
+
+        operator bool() const { return was_good; }
+    };
+
+    inline std::ostream& operator<< (std::ostream& out, const layer_test_results& item)
+    {
+        out << item.log;
+        return out;
+    }

     template <typename layer_details_type>
-    void test_layer (
+    layer_test_results test_layer (
         layer_details_type l
     )
     {
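With this change test_layer() no longer just prints to std::cout; it reports through the returned layer_test_results object, which converts to bool and streams its log. A minimal usage sketch (my_layer_ is a hypothetical stand-in for any class implementing the EXAMPLE_LAYER_ interface, and the dlib/dnn.h umbrella header is assumed):

    #include <iostream>
    #include <dlib/dnn.h>

    int main()
    {
        my_layer_ l;  // hypothetical layer type; substitute your own layer class

        // Run the numerical gradient checks on the layer.
        dlib::layer_test_results res = dlib::test_layer(l);

        // operator bool() exposes was_good; operator<< prints the log.
        if (!res)
            std::cout << "layer failed testing:\n" << res << std::endl;
        return 0;
    }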
@@ -1577,12 +1584,13 @@ namespace dlib
         resizable_tensor input_grad;
         input_grad.copy_size(output);
-        std::cout << "output.num_samples(): " << output.num_samples() << std::endl;
         fill_with_gassuan_random_numbers(input_grad, rnd);

+        std::ostringstream sout;
         // The f() we are computing gradients of is this thing.  It's value at the current
         // parameter and data values is:
-        std::cout << "f(data,params): " << dot(output, input_grad) << std::endl;
+        //sout << "f(data,params): " << dot(output, input_grad) << std::endl;

         // We are going to save a copy of the subnetwork.get_gradient_input() data before we do
         // backpropagation since the backward() function is supposed to *add* to the
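For orientation (the relevant lines sit outside this hunk, so the details below are inferred rather than quoted from the commit): the scalar function being differentiated is f(data, params) = dot(output, input_grad), the layer output projected onto a fixed random tensor. Reducing the output to a scalar this way lets each parameter and data gradient be checked against a simple central-difference estimate. A rough sketch of that style of check, with the step size eps and the helper name chosen purely for illustration:

    #include <functional>
    #include <iostream>

    // Illustrative only: two-sided finite-difference estimate of df/dx at x.
    double central_difference(const std::function<double(double)>& f, double x)
    {
        const double eps = 1e-4;                       // assumed step size
        return (f(x + eps) - f(x - eps)) / (2 * eps);
    }

    int main()
    {
        // d/dx (x*x) at x = 3 is 6; the estimate should come out very close to that.
        double reference_derivative = central_difference([](double x) { return x * x; }, 3.0);
        std::cout << reference_derivative << std::endl;  // ~6.0

        // test_layer() presumably compares a reference value like this against the
        // derivative reported by backward(), failing when the relative error exceeds 0.01.
        return 0;
    }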
@@ -1604,7 +1612,6 @@ namespace dlib
         params_grad = random_noise;
         l.backward(output, input_grad, subnetwork, params_grad);
-
         running_stats<double> rs_param, rs_data;

         // ==================================================================
         // first validate the way the parameter gradients are computed
@@ -1629,12 +1636,12 @@ namespace dlib
             if (std::abs(relative_error) > 0.01)
             {
                 using namespace std;
-                cout << "PARAM ERROR: " << relative_error << endl;
-                cout << "   reference_derivative: " << reference_derivative << endl;
-                cout << "   output_derivative: " << output_derivative << endl;
+                sout << "Gradient error in parameter #" << i << ". Relative error: " << relative_error << endl;
+                sout << "expected derivative: " << reference_derivative << endl;
+                sout << "output derivative: " << output_derivative << endl;
+                return layer_test_results(sout.str());
             }

             rs_param.add(std::abs(relative_error));
         }

         // ==================================================================
@@ -1658,26 +1665,14 @@ namespace dlib
             if (std::abs(relative_error) > 0.01)
            {
                 using namespace std;
-                cout << "DATA ERROR: " << relative_error << endl;
-                cout << "   reference_derivative: " << reference_derivative << endl;
-                cout << "   output_derivative: " << output_derivative << endl;
+                sout << "Gradient error in data variable #" << i << ". Relative error: " << relative_error << endl;
+                sout << "expected derivative: " << reference_derivative << endl;
+                sout << "output derivative: " << output_derivative << endl;
+                return layer_test_results(sout.str());
             }

             rs_data.add(std::abs(relative_error));
         }

-        using namespace std;
-        if (rs_param.current_n() > 1)
-        {
-            cout << "rs_param.mean():   " << rs_param.mean() << endl;
-            cout << "rs_param.stddev(): " << rs_param.stddev() << endl;
-            cout << "rs_param.max():    " << rs_param.max() << endl;
-        }
-        if (rs_data.current_n() > 1)
-        {
-            cout << "rs_data.mean():   " << rs_data.mean() << endl;
-            cout << "rs_data.stddev(): " << rs_data.stddev() << endl;
-            cout << "rs_data.max():    " << rs_data.max() << endl;
-        }
+        return layer_test_results();
     }

 // ----------------------------------------------------------------------------------------
dlib/dnn/core_abstract.h
@@ -965,10 +965,24 @@ namespace dlib
 // ----------------------------------------------------------------------------------------

+    struct layer_test_results
+    {
+        std::string log;
+        bool was_good;
+
+        operator bool() const { return was_good; }
+    };
+
+    inline std::ostream& operator<< (std::ostream& out, const layer_test_results& item)
+    {
+        out << item.log;
+        return out;
+    }
+
     template <typename layer_details_type>
-    void test_layer (
+    layer_test_results test_layer (
         layer_details_type l
     );
     /*!
@@ -976,6 +990,10 @@
         - Checks if l correctly implements the EXAMPLE_LAYER_ interface defined in
           layers_abstract.h.  Importantly, it computes numerical approximations to the
           gradients and compares them to the outputs of the layer.
+        - The results of the testing are returned.  In particular, if the returned object
+          is RESULT then we will have:
+            - RESULT.was_good == false if and only if the layer failed the testing.
+            - RESULT.log == a string describing why the testing failed if was_good==false.
         - Note that this function is only capable of checking layers that take
           arbitrary subnetworks as input.  So if you have designed a layer that expects
           only a certain restricted type of subnetwork then you might get a compile or
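Because the returned object converts to bool and has an operator<<, it also drops neatly into assertion macros that stream their message, which is a natural way to wire test_layer() into a unit test. A small sketch assuming dlib's DLIB_CASSERT macro (its second argument is streamed into the failure message) and some layer object l you want to validate:

    auto res = test_layer(l);
    DLIB_CASSERT(res, "layer failed its gradient checks: " << res);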