钟尚武 / dlib

Commit cbb69de2, authored Jun 21, 2016 by Fm
Visual studio now compiles dnn_mnist_advanced, inception and dtest
parent 943a07cb
Showing 2 changed files with 39 additions and 20 deletions (+39 / -20):

  dlib/cmake         +2   -0
  dlib/dnn/layers.h  +37  -20
dlib/cmake
@@ -78,6 +78,8 @@ elseif (MSVC OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") # else if using Visu
       message(STATUS "Enabling SSE2 instructions")
       add_definitions(-DDLIB_HAVE_SSE2)
    endif()
+   # DNN module produces long type names for NN definitions - disable this warning for MSVC
+   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4503")
 endif()
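The /wd4503 flag suppresses MSVC warning C4503 ("decorated name length exceeded, name was truncated"). dlib networks are defined as deeply nested templates, so the compiler-generated type names grow with network depth and eventually exceed MSVC's name-length limit. A minimal sketch of the effect, using made-up layer names rather than dlib's real layer templates:

    // Why C4503 appears: every layer template wraps the previous one, so the
    // decorated (mangled) name of the final network type grows with depth.
    // The layer names below are invented for illustration; they are not dlib's.
    #include <iostream>
    #include <typeinfo>

    template <typename SUBNET> struct con  { SUBNET subnet; };
    template <typename SUBNET> struct relu { SUBNET subnet; };
    template <typename SUBNET> struct fc   { SUBNET subnet; };
    struct input {};

    // A toy "network" is just a chain of nested templates, like a DNN definition.
    using toy_net = fc<relu<con<relu<con<relu<con<input>>>>>>>;

    int main()
    {
        // Even this 7-layer chain yields a long type name; real networks nest
        // dozens of layers, which is what pushes MSVC past its limit.
        std::cout << typeid(toy_net).name() << "\n";
    }

Disabling the warning does not change code generation; it only silences the diagnostic about the truncated name.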
dlib/dnn/layers.h
@@ -1985,68 +1985,85 @@ namespace dlib
     // ----------------------------------------------------------------------------------------
     namespace impl
     {
-        // helper classes for layer concat processing
-        template <template<typename> class... TAG_TYPES>
-        struct concat_helper_impl {
-        };
-        template <template<typename> class TAG_TYPE>
-        struct concat_helper_impl<TAG_TYPE>
-        {
-            constexpr static size_t tag_count() {return 1;}
+//        // helper classes for layer concat processing
+//        template <template<typename> class... TAG_TYPES>
+//        struct concat_helper_impl {
+//            // this specialization will be used only by MSVC
+//            constexpr static size_t tag_count() {return 0;}
+//            static void list_tags(std::ostream& out)
+//            {
+//            }
+//            template<typename SUBNET>
+//            static void resize_out(resizable_tensor&, const SUBNET&, long)
+//            {
+//            }
+//            template<typename SUBNET>
+//            static void concat(tensor&, const SUBNET&, size_t)
+//            {
+//            }
+//            template<typename SUBNET>
+//            static void split(const tensor&, SUBNET&, size_t)
+//            {
+//            }
+//        };
+        template <template<typename> class TAG_TYPE, template<typename> class... TAG_TYPES>
+        struct concat_helper_impl
+        {
+            constexpr static size_t tag_count() {return 1 + concat_helper_impl<TAG_TYPES...>::tag_count();}
             static void list_tags(std::ostream& out)
             {
-                out << tag_id<TAG_TYPE>::id;
+                out << tag_id<TAG_TYPE>::id << (tag_count() > 1 ? "," : "");
+                concat_helper_impl<TAG_TYPES...>::list_tags(out);
             }
             template<typename SUBNET>
             static void resize_out(resizable_tensor& out, const SUBNET& sub, long sum_k)
             {
                 auto& t = layer<TAG_TYPE>(sub).get_output();
                 out.set_size(t.num_samples(), t.k() + sum_k, t.nr(), t.nc());
+                concat_helper_impl<TAG_TYPES...>::resize_out(out, sub, sum_k + t.k());
             }
             template<typename SUBNET>
             static void concat(tensor& out, const SUBNET& sub, size_t k_offset)
             {
                 auto& t = layer<TAG_TYPE>(sub).get_output();
                 tt::copy_tensor(out, k_offset, t, 0, t.k());
+                k_offset += t.k();
+                concat_helper_impl<TAG_TYPES...>::concat(out, sub, k_offset);
             }
             template<typename SUBNET>
             static void split(const tensor& input, SUBNET& sub, size_t k_offset)
             {
                 auto& t = layer<TAG_TYPE>(sub).get_gradient_input();
                 tt::copy_tensor(t, 0, input, k_offset, t.k());
+                k_offset += t.k();
+                concat_helper_impl<TAG_TYPES...>::split(input, sub, k_offset);
             }
         };
-        template <template<typename> class TAG_TYPE, template<typename> class... TAG_TYPES>
-        struct concat_helper_impl<TAG_TYPE, TAG_TYPES...>
-        {
-            constexpr static size_t tag_count() {return 1 + concat_helper_impl<TAG_TYPES...>::tag_count();}
+        template <template<typename> class TAG_TYPE>
+        struct concat_helper_impl<TAG_TYPE>
+        {
+            constexpr static size_t tag_count() {return 1;}
             static void list_tags(std::ostream& out)
             {
-                out << tag_id<TAG_TYPE>::id << ",";
-                concat_helper_impl<TAG_TYPES...>::list_tags(out);
+                out << tag_id<TAG_TYPE>::id;
             }
             template<typename SUBNET>
             static void resize_out(resizable_tensor& out, const SUBNET& sub, long sum_k)
             {
                 auto& t = layer<TAG_TYPE>(sub).get_output();
-                concat_helper_impl<TAG_TYPES...>::resize_out(out, sub, sum_k + t.k());
+                out.set_size(t.num_samples(), t.k() + sum_k, t.nr(), t.nc());
             }
             template<typename SUBNET>
             static void concat(tensor& out, const SUBNET& sub, size_t k_offset)
             {
                 auto& t = layer<TAG_TYPE>(sub).get_output();
                 tt::copy_tensor(out, k_offset, t, 0, t.k());
-                k_offset += t.k();
-                concat_helper_impl<TAG_TYPES...>::concat(out, sub, k_offset);
             }
             template<typename SUBNET>
             static void split(const tensor& input, SUBNET& sub, size_t k_offset)
             {
                 auto& t = layer<TAG_TYPE>(sub).get_gradient_input();
                 tt::copy_tensor(t, 0, input, k_offset, t.k());
-                k_offset += t.k();
-                concat_helper_impl<TAG_TYPES...>::split(input, sub, k_offset);
             }
         };
     }
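The helpers above walk the tag pack recursively: the general case handles one tag and forwards the rest of the pack, and a single-tag specialization terminates the recursion. A stripped-down, self-contained sketch of that shape (the names concat_helper and tag<ID> are invented for illustration, and plain tag types stand in for dlib's template-template tag layers and tensor calls):

    #include <cstddef>
    #include <iostream>
    #include <ostream>

    // Hypothetical stand-in for dlib's tag layers; only an id is needed here.
    template <int ID>
    struct tag { static constexpr int id = ID; };

    // General case: count one tag, then recurse on the remaining pack.
    template <typename TAG, typename... TAGS>
    struct concat_helper
    {
        constexpr static std::size_t tag_count() { return 1 + concat_helper<TAGS...>::tag_count(); }
        static void list_tags(std::ostream& out)
        {
            out << TAG::id << ",";
            concat_helper<TAGS...>::list_tags(out);
        }
    };

    // Single-tag specialization terminates the recursion (no trailing comma).
    template <typename TAG>
    struct concat_helper<TAG>
    {
        constexpr static std::size_t tag_count() { return 1; }
        static void list_tags(std::ostream& out) { out << TAG::id; }
    };

    int main()
    {
        std::cout << concat_helper<tag<1>, tag<2>, tag<3>>::tag_count() << "\n"; // prints 3
        concat_helper<tag<1>, tag<2>, tag<3>>::list_tags(std::cout);             // prints 1,2,3
        std::cout << "\n";
    }

The real helpers additionally resize the output tensor and copy each tagged layer's channels at a running k offset, as shown in the diff above.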