Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
dlib
Commits
cbb69de2
Commit
cbb69de2
authored
Jun 21, 2016
by
Fm
Browse files
Visual studio now compiles dnn_mnist_advanced, inception and dtest
parent
943a07cb
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
41 additions
and
22 deletions
+41
-22
dlib/cmake
dlib/cmake
+2
-0
dlib/dnn/layers.h
dlib/dnn/layers.h
+39
-22
No files found.
dlib/cmake
View file @
cbb69de2
...
# Excerpt of the MSVC branch of the compiler-option setup, i.e. inside:
#   elseif (MSVC OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
      message(STATUS "Enabling SSE2 instructions")
      add_definitions(-DDLIB_HAVE_SSE2)
   endif()

   # The DNN module produces extremely long decorated type names for network
   # definitions, which triggers MSVC warning C4503 ("decorated name length
   # exceeded, name was truncated").  The truncation is harmless, so disable
   # the warning for MSVC builds.
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4503")
endif()
dlib/dnn/layers.h
View file @
cbb69de2
...
@@ -1985,68 +1985,85 @@ namespace dlib
...
@@ -1985,68 +1985,85 @@ namespace dlib
// ----------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------
namespace
impl
{
namespace
impl
{
// helper classes for layer concat processing
// // helper classes for layer concat processing
template
<
template
<
typename
>
class
...
TAG_TYPES
>
// template <template<typename> class... TAG_TYPES>
struct
concat_helper_impl
{
// struct concat_helper_impl {
};
// // this specialization will be used only by MSVC
template
<
template
<
typename
>
class
TAG_TYPE
>
// constexpr static size_t tag_count() {return 0;}
struct
concat_helper_impl
<
TAG_TYPE
>
{
// static void list_tags(std::ostream& out)
constexpr
static
size_t
tag_count
()
{
return
1
;}
// {
static
void
list_tags
(
std
::
ostream
&
out
)
// }
{
// template<typename SUBNET>
out
<<
tag_id
<
TAG_TYPE
>::
id
;
// static void resize_out(resizable_tensor&, const SUBNET&, long)
// {
// }
// template<typename SUBNET>
// static void concat(tensor&, const SUBNET&, size_t)
// {
// }
// template<typename SUBNET>
// static void split(const tensor&, SUBNET&, size_t)
// {
// }
// };
template
<
template
<
typename
>
class
TAG_TYPE
,
template
<
typename
>
class
...
TAG_TYPES
>
struct
concat_helper_impl
{
constexpr
static
size_t
tag_count
()
{
return
1
+
concat_helper_impl
<
TAG_TYPES
...
>::
tag_count
();}
static
void
list_tags
(
std
::
ostream
&
out
)
{
out
<<
tag_id
<
TAG_TYPE
>::
id
<<
(
tag_count
()
>
1
?
","
:
""
);
concat_helper_impl
<
TAG_TYPES
...
>::
list_tags
(
out
);
}
}
template
<
typename
SUBNET
>
template
<
typename
SUBNET
>
static
void
resize_out
(
resizable_tensor
&
out
,
const
SUBNET
&
sub
,
long
sum_k
)
static
void
resize_out
(
resizable_tensor
&
out
,
const
SUBNET
&
sub
,
long
sum_k
)
{
{
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_output
();
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_output
();
out
.
set_size
(
t
.
num_samples
(),
t
.
k
()
+
sum_k
,
t
.
nr
(),
t
.
nc
());
concat_helper_impl
<
TAG_TYPES
...
>::
resize_out
(
out
,
sub
,
sum_k
+
t
.
k
());
}
}
template
<
typename
SUBNET
>
template
<
typename
SUBNET
>
static
void
concat
(
tensor
&
out
,
const
SUBNET
&
sub
,
size_t
k_offset
)
static
void
concat
(
tensor
&
out
,
const
SUBNET
&
sub
,
size_t
k_offset
)
{
{
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_output
();
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_output
();
tt
::
copy_tensor
(
out
,
k_offset
,
t
,
0
,
t
.
k
());
tt
::
copy_tensor
(
out
,
k_offset
,
t
,
0
,
t
.
k
());
k_offset
+=
t
.
k
();
concat_helper_impl
<
TAG_TYPES
...
>::
concat
(
out
,
sub
,
k_offset
);
}
}
template
<
typename
SUBNET
>
template
<
typename
SUBNET
>
static
void
split
(
const
tensor
&
input
,
SUBNET
&
sub
,
size_t
k_offset
)
static
void
split
(
const
tensor
&
input
,
SUBNET
&
sub
,
size_t
k_offset
)
{
{
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_gradient_input
();
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_gradient_input
();
tt
::
copy_tensor
(
t
,
0
,
input
,
k_offset
,
t
.
k
());
tt
::
copy_tensor
(
t
,
0
,
input
,
k_offset
,
t
.
k
());
k_offset
+=
t
.
k
();
concat_helper_impl
<
TAG_TYPES
...
>::
split
(
input
,
sub
,
k_offset
);
}
}
};
};
template
<
template
<
typename
>
class
TAG_TYPE
,
template
<
typename
>
class
...
TAG_TYPES
>
template
<
template
<
typename
>
class
TAG_TYPE
>
struct
concat_helper_impl
<
TAG_TYPE
,
TAG_TYPES
...
>
{
struct
concat_helper_impl
<
TAG_TYPE
>
{
constexpr
static
size_t
tag_count
()
{
return
1
;}
constexpr
static
size_t
tag_count
()
{
return
1
+
concat_helper_impl
<
TAG_TYPES
...
>::
tag_count
();}
static
void
list_tags
(
std
::
ostream
&
out
)
static
void
list_tags
(
std
::
ostream
&
out
)
{
{
out
<<
tag_id
<
TAG_TYPE
>::
id
<<
","
;
out
<<
tag_id
<
TAG_TYPE
>::
id
;
concat_helper_impl
<
TAG_TYPES
...
>::
list_tags
(
out
);
}
}
template
<
typename
SUBNET
>
template
<
typename
SUBNET
>
static
void
resize_out
(
resizable_tensor
&
out
,
const
SUBNET
&
sub
,
long
sum_k
)
static
void
resize_out
(
resizable_tensor
&
out
,
const
SUBNET
&
sub
,
long
sum_k
)
{
{
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_output
();
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_output
();
concat_helper_impl
<
TAG_TYPES
...
>::
resize_out
(
out
,
sub
,
sum_k
+
t
.
k
());
out
.
set_size
(
t
.
num_samples
(),
t
.
k
()
+
sum_k
,
t
.
nr
(),
t
.
nc
());
}
}
template
<
typename
SUBNET
>
template
<
typename
SUBNET
>
static
void
concat
(
tensor
&
out
,
const
SUBNET
&
sub
,
size_t
k_offset
)
static
void
concat
(
tensor
&
out
,
const
SUBNET
&
sub
,
size_t
k_offset
)
{
{
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_output
();
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_output
();
tt
::
copy_tensor
(
out
,
k_offset
,
t
,
0
,
t
.
k
());
tt
::
copy_tensor
(
out
,
k_offset
,
t
,
0
,
t
.
k
());
k_offset
+=
t
.
k
();
concat_helper_impl
<
TAG_TYPES
...
>::
concat
(
out
,
sub
,
k_offset
);
}
}
template
<
typename
SUBNET
>
template
<
typename
SUBNET
>
static
void
split
(
const
tensor
&
input
,
SUBNET
&
sub
,
size_t
k_offset
)
static
void
split
(
const
tensor
&
input
,
SUBNET
&
sub
,
size_t
k_offset
)
{
{
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_gradient_input
();
auto
&
t
=
layer
<
TAG_TYPE
>
(
sub
).
get_gradient_input
();
tt
::
copy_tensor
(
t
,
0
,
input
,
k_offset
,
t
.
k
());
tt
::
copy_tensor
(
t
,
0
,
input
,
k_offset
,
t
.
k
());
k_offset
+=
t
.
k
();
concat_helper_impl
<
TAG_TYPES
...
>::
split
(
input
,
sub
,
k_offset
);
}
}
};
};
}
}
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment