Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
dlib
Commits
6168781a
Commit
6168781a
authored
Sep 10, 2016
by
Lucas Clemente Vella
Committed by
Davis E. King
Sep 10, 2016
Browse files
Adding specific parameters interface on fc_ layer (#213)
parent
48df23a0
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
80 additions
and
0 deletions
+80
-0
dlib/dnn/layers.h
dlib/dnn/layers.h
+24
-0
dlib/dnn/layers_abstract.h
dlib/dnn/layers_abstract.h
+56
-0
No files found.
dlib/dnn/layers.h
View file @
6168781a
...
...
@@ -1086,6 +1086,30 @@ namespace dlib
tt
::
gemm
(
1
,
sub
.
get_gradient_input
(),
1
,
gradient_input
,
false
,
w
,
true
);
}
// Mutable alias of the weight matrix stored inside the packed parameter
// tensor.  The weights occupy the leading portion of params, so the alias
// starts at offset 0.
alias_tensor_instance get_weights()
{
    const size_t weights_offset = 0;
    return weights(params, weights_offset);
}
// Read-only alias of the weight matrix.  Identical to the non-const
// overload except that the returned view cannot be written through.
alias_tensor_const_instance get_weights() const
{
    return weights(params, 0);
}
// Mutable alias of the bias vector.  Only meaningful when the layer was
// instantiated with FC_HAS_BIAS; otherwise this is a compile-time error.
alias_tensor_instance get_biases()
{
    static_assert(bias_mode == FC_HAS_BIAS,
        "This fc_ layer doesn't have a bias vector "
        "to be retrieved, as per template parameter 'bias_mode'.");
    // The biases are packed immediately after the weight matrix in params.
    const size_t bias_offset = weights.size();
    return biases(params, bias_offset);
}
// Read-only alias of the bias vector; same packing and same compile-time
// guard as the mutable overload.
alias_tensor_const_instance get_biases() const
{
    static_assert(bias_mode == FC_HAS_BIAS,
        "This fc_ layer doesn't have a bias vector "
        "to be retrieved, as per template parameter 'bias_mode'.");
    // Bias vector follows the weight matrix inside the parameter tensor.
    const size_t bias_offset = weights.size();
    return biases(params, bias_offset);
}
// Read-only access to the raw packed parameter tensor (weights, then
// biases when present).
const tensor& get_layer_params() const
{
    return params;
}
// Mutable access to the raw packed parameter tensor, e.g. for an
// optimizer that updates the layer in place.
tensor& get_layer_params()
{
    return params;
}
...
...
dlib/dnn/layers_abstract.h
View file @
6168781a
...
...
@@ -539,6 +539,62 @@ namespace dlib
- #get_bias_weight_decay_multiplier() == val
!*/
alias_tensor_const_instance get_weights(
) const;
/*!
    ensures
        - returns an alias of get_layer_params(), containing the weights matrix of
          the fully connected layer.
        - #get_weights().num_samples() is the number of elements in input sample,
          i.e. sublayer's output's k * nc * nr.
        - #get_weights().k() == #get_num_outputs()
        - if get_bias_mode() == FC_HAS_BIAS:
            - #get_layer_params().size() == (#get_weights().size() + #get_biases().size())
        - else:
            - #get_layer_params().size() == #get_weights().size()
!*/
alias_tensor_instance get_weights(
);
/*!
    ensures
        - returns an alias of get_layer_params(), containing the weights matrix of
          the fully connected layer.
        - #get_weights().num_samples() is the number of elements in input sample,
          i.e. sublayer's output's k * nc * nr.
        - #get_weights().k() == #get_num_outputs()
        - if get_bias_mode() == FC_HAS_BIAS:
            - #get_layer_params().size() == (#get_weights().size() + #get_biases().size())
        - else:
            - #get_layer_params().size() == #get_weights().size()
!*/
alias_tensor_const_instance get_biases(
) const;
/*!
    requires
        - #get_bias_mode() == FC_HAS_BIAS
    ensures
        - returns an alias of get_layer_params(), containing the bias vector of
          the fully connected layer.
        - #get_biases().num_samples() == 1
        - #get_biases().k() == #get_num_outputs()
        - #get_layer_params().size() == (#get_weights().size() + #get_biases().size())
!*/
alias_tensor_instance get_biases(
);
/*!
    requires
        - #get_bias_mode() == FC_HAS_BIAS
    ensures
        - returns an alias of get_layer_params(), containing the bias vector of
          the fully connected layer.
        - #get_biases().num_samples() == 1
        - #get_biases().k() == #get_num_outputs()
        - #get_layer_params().size() == (#get_weights().size() + #get_biases().size())
!*/
// NOTE(review): the contract text for these three functions is cut off in
// this diff hunk; presumably they are implemented as described in dlib's
// EXAMPLE_COMPUTATIONAL_LAYER_ interface — confirm against the full header.
template <typename SUBNET> void setup (const SUBNET& sub);
template <typename SUBNET> void forward(const SUBNET& sub, resizable_tensor& output);
template <typename SUBNET> void backward(const tensor& gradient_input, SUBNET& sub, tensor& params_grad);
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment