Commit fc6992ac authored by Davis King's avatar Davis King
Browse files

A little bit of cleanup

parent 10d7f119
......@@ -59,7 +59,7 @@ namespace dlib
void set_learning_rate_multiplier(
T& obj,
double learning_rate_multiplier
)
);
/*!
requires
- learning_rate_multiplier >= 0
......@@ -88,7 +88,7 @@ namespace dlib
void set_bias_learning_rate_multiplier(
T& obj,
double bias_learning_rate_multiplier
)
);
/*!
requires
- bias_learning_rate_multiplier >= 0
......
......@@ -311,12 +311,6 @@ namespace dlib
visitor_learning_rate_multiplier(double new_learning_rate_multiplier_) :
new_learning_rate_multiplier(new_learning_rate_multiplier_) {}
template <typename T>
void set_new_learning_rate_multiplier(T& l) const
{
set_learning_rate_multiplier(l, new_learning_rate_multiplier);
}
template <typename input_layer_type>
void operator()(size_t , input_layer_type& ) const
{
......@@ -326,7 +320,7 @@ namespace dlib
template <typename T, typename U, typename E>
void operator()(size_t , add_layer<T,U,E>& l) const
{
set_new_learning_rate_multiplier(l.layer_details());
set_learning_rate_multiplier(l.layer_details(), new_learning_rate_multiplier);
}
private:
......
......@@ -47,12 +47,6 @@ public:
visitor_weight_decay_multiplier(double new_weight_decay_multiplier_) :
new_weight_decay_multiplier(new_weight_decay_multiplier_) {}
template <typename T>
void set_new_weight_decay_mulitplier(T& l) const
{
set_weight_decay_multiplier(l, new_weight_decay_multiplier);
}
template<typename input_layer_type>
void operator()(size_t , input_layer_type& ) const
{
......@@ -62,7 +56,7 @@ public:
template <typename T, typename U, typename E>
void operator()(size_t , add_layer<T,U,E>& l) const
{
set_new_weight_decay_mulitplier(l.layer_details());
set_weight_decay_multiplier(l.layer_details(), new_weight_decay_multiplier);
}
private:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment