OpenDAS / dlib · Commits · 76786430

Commit 76786430, authored Dec 08, 2015 by Davis King
parent 3222a3af

    Added the dropout layer

Showing 2 changed files with 140 additions and 0 deletions (+140 −0):

    dlib/dnn/layers.h           +90  −0
    dlib/dnn/layers_abstract.h  +50  −0
dlib/dnn/layers.h  (view file @ 76786430)

@@ -333,6 +333,96 @@ namespace dlib
    template <typename SUBNET>
    using fc = add_layer<fc_, SUBNET>;

// ----------------------------------------------------------------------------------------

    class dropout_
    {
    public:
        explicit dropout_(
            float drop_rate_ = 0.5
        ) : drop_rate(drop_rate_)
        {
        }

        // We have to add a copy constructor and assignment operator because the rnd object
        // is non-copyable.
        dropout_(
            const dropout_& item
        ) : drop_rate(item.drop_rate), mask(item.mask)
        {}

        dropout_& operator= (
            const dropout_& item
        )
        {
            if (this == &item)
                return *this;
            drop_rate = item.drop_rate;
            mask = item.mask;
            return *this;
        }

        float get_drop_rate (
        ) const { return drop_rate; }

        template <typename SUBNET>
        void setup (const SUBNET& /*sub*/)
        {
        }

        void forward_inplace(const tensor& input, tensor& output)
        {
            // create a random mask and use it to filter the data
            mask.copy_size(input);
            rnd.fill_uniform(mask);
            tt::threshold(mask, drop_rate);
            tt::multiply(output, input, mask);
        }

        void backward_inplace(
            const tensor& /*computed_output*/,
            const tensor& gradient_input,
            tensor& data_grad,
            tensor& /*params_grad*/
        )
        {
            tt::multiply(data_grad, mask, gradient_input);
        }

        const tensor& get_layer_params() const { return params; }
        tensor& get_layer_params() { return params; }

        friend void serialize(const dropout_& item, std::ostream& out)
        {
            serialize("dropout_", out);
            serialize(item.drop_rate, out);
            serialize(item.mask, out);
        }

        friend void deserialize(dropout_& item, std::istream& in)
        {
            std::string version;
            deserialize(version, in);
            if (version != "dropout_")
                throw serialization_error("Unexpected version found while deserializing dlib::dropout_.");
            deserialize(item.drop_rate, in);
            deserialize(item.mask, in);
        }

    private:
        float drop_rate;
        resizable_tensor mask;
        tt::tensor_rand rnd;
        resizable_tensor params; // unused
    };

    template <typename SUBNET>
    using dropout = add_layer<dropout_, SUBNET>;

// ----------------------------------------------------------------------------------------

    class relu_
    ...
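The forward pass builds a 0/1 mask by drawing uniform random values and thresholding them, then multiplies the mask into the input elementwise; the backward pass multiplies the incoming gradient by the same saved mask. Below is a minimal, illustrative sketch of that arithmetic on plain std::vector<float>. It is not dlib code (the real layer operates on tensors via tt::threshold and tt::multiply); it just spells out the same masking logic in isolation.

#include <cstddef>
#include <random>
#include <vector>

int main()
{
    const float drop_rate = 0.5f;
    std::vector<float> input          = {1.0f, 2.0f, 3.0f, 4.0f};
    std::vector<float> gradient_input = {0.1f, 0.2f, 0.3f, 0.4f};

    std::mt19937 rng(0);
    std::uniform_real_distribution<float> uniform(0.0f, 1.0f);

    // Build the 0/1 mask: each element is dropped (set to 0) with probability
    // drop_rate and kept (set to 1) otherwise, matching the documented
    // meaning of get_drop_rate().
    std::vector<float> mask(input.size());
    for (auto& m : mask)
        m = (uniform(rng) > drop_rate) ? 1.0f : 0.0f;

    // Forward: output = input .* mask
    std::vector<float> output(input.size());
    for (std::size_t i = 0; i < input.size(); ++i)
        output[i] = input[i] * mask[i];

    // Backward: data_grad = gradient_input .* mask, using the mask saved from
    // the forward pass, so dropped positions also receive zero gradient.
    std::vector<float> data_grad(input.size());
    for (std::size_t i = 0; i < input.size(); ++i)
        data_grad[i] = gradient_input[i] * mask[i];

    return 0;
}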
dlib/dnn/layers_abstract.h  (view file @ 76786430)

@@ -470,6 +470,56 @@ namespace dlib
    template <typename SUBNET>
    using con = add_layer<con_, SUBNET>;

// ----------------------------------------------------------------------------------------

    class dropout_
    {
        /*!
            WHAT THIS OBJECT REPRESENTS
                This is an implementation of the EXAMPLE_LAYER_ interface defined above.
                In particular, it defines a dropout layer.  Therefore, it passes its inputs
                through the stochastic function f(x) which outputs either 0 or x.  The
                probability of 0 being output is given by the drop_rate argument to this
                object's constructor.
        !*/

    public:

        explicit dropout_(
            float drop_rate = 0.5
        );
        /*!
            ensures
                - #get_drop_rate() == drop_rate
        !*/

        float get_drop_rate (
        ) const;
        /*!
            ensures
                - returns the probability that an individual input value to this layer will
                  be replaced with 0.
        !*/

        template <typename SUBNET> void setup (const SUBNET& sub);
        void forward_inplace(const tensor& input, tensor& output);
        void backward_inplace(
            const tensor& computed_output,
            const tensor& gradient_input,
            tensor& data_grad,
            tensor& params_grad
        );
        const tensor& get_layer_params() const;
        tensor& get_layer_params();
        /*!
            These functions are implemented as described in the EXAMPLE_LAYER_ interface.
        !*/
    };

    void serialize(const dropout_& item, std::ostream& out);
    void deserialize(dropout_& item, std::istream& in);
    /*!
        provides serialization support
    !*/

    template <typename SUBNET>
    using dropout = add_layer<dropout_, SUBNET>;

// ----------------------------------------------------------------------------------------

    class relu_
    ...
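Like the other layers declared in this file, the dropout layer is exposed through an add_layer alias, so it composes with the rest of a network purely at the type level. Here is a hedged composition sketch that uses only aliases visible in this diff (fc and dropout); the name dropped_fc and the SUBNET placeholder are illustrative and not part of the commit.

    // Hypothetical building block: wraps a fully connected layer with dropout.
    // SUBNET stands for whatever part of the network sits below this block.
    template <typename SUBNET>
    using dropped_fc = dropout<fc<SUBNET>>;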