OpenDAS / dlib

Commit 1569d590 authored Oct 18, 2015 by Davis King

Made more move constructors use swap() just to be safe

Parent: 1de8eaf8
Changes: 1 changed file with 42 additions and 9 deletions (+42 −9)
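The pattern applied throughout this commit is the swap-based move: instead of `= default`, the move constructor delegates to the default constructor and then swaps with the source, and move assignment swaps and returns `*this`. Below is a minimal sketch of the idiom; `widget` and its members are hypothetical stand-ins, not the actual dlib types.

    #include <utility>
    #include <vector>

    // Minimal sketch of the swap-based move idiom; "widget" and its
    // members are hypothetical stand-ins, not types from dlib/dnn/core.h.
    class widget
    {
    public:
        widget() = default;
        widget(const widget&) = default;
        widget& operator=(const widget&) = default;

        // Move construction: default-construct *this, then swap with the
        // source.  The moved-from object is left exactly default-
        // constructed, a fully specified state.
        widget(widget&& item) : widget() { swap(item); }

        // Move assignment via the same swap.  The source receives this
        // object's old contents, which its destructor will clean up.
        widget& operator=(widget&& item) { swap(item); return *this; }

    private:
        void swap(widget& item)
        {
            std::swap(data, item.data);
            std::swap(setup_called, item.setup_called);
        }

        std::vector<float> data;
        bool setup_called = false;
    };

    int main()
    {
        widget a;
        widget b(std::move(a));  // a is now equivalent to a fresh widget()
        a = std::move(b);        // b is left holding a's previous (default) state
        return 0;
    }

One caveat of the idiom: move assignment via swap leaves the source holding the target's old resources, so their release is deferred until the source is destroyed.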
dlib/dnn/core.h (+42 −9)
@@ -190,9 +190,9 @@ namespace dlib
         }
 
         add_layer(const add_layer&) = default;
-        add_layer(add_layer&&) = default;
-        add_layer& operator=(add_layer&&) = default;
         add_layer& operator=(const add_layer&) = default;
+        add_layer(add_layer&& item) : add_layer() { swap(item); }
+        add_layer& operator=(add_layer&& item) { swap(item); return *this; }
 
         template <typename T, typename U, typename E>
         friend class add_layer;
@@ -345,6 +345,16 @@ namespace dlib
     private:
 
+        void swap(add_layer& item)
+        {
+            std::swap(subnetwork, item.subnetwork);
+            std::swap(details, item.details);
+            std::swap(this_layer_setup_called, item.this_layer_setup_called);
+            std::swap(gradient_input_is_stale, item.gradient_input_is_stale);
+            std::swap(x_grad, item.x_grad);
+            std::swap(cached_output, item.cached_output);
+        }
+
         subnet_type subnetwork;
         LAYER_DETAILS details;
@@ -384,9 +394,9 @@ namespace dlib
         {}
 
         add_layer(const add_layer&) = default;
-        add_layer(add_layer&&) = default;
+        add_layer(add_layer&& item) : add_layer() { swap(item); }
-        add_layer& operator=(add_layer&&) = default;
         add_layer& operator=(const add_layer&) = default;
+        add_layer& operator=(add_layer&& item) { swap(item); return *this; }
 
         template <typename T, typename U, typename E>
         friend class add_layer;
@@ -574,6 +584,16 @@ namespace dlib
             resizable_tensor& grad_final_ignored;
         };
 
+        void swap(add_layer& item)
+        {
+            std::swap(input_layer, item.input_layer);
+            std::swap(details, item.details);
+            std::swap(this_layer_setup_called, item.this_layer_setup_called);
+            std::swap(gradient_input_is_stale, item.gradient_input_is_stale);
+            std::swap(x_grad, item.x_grad);
+            std::swap(cached_output, item.cached_output);
+        }
+
         subnet_type input_layer;
         LAYER_DETAILS details;
         bool this_layer_setup_called;
@@ -714,9 +734,9 @@ namespace dlib
         add_tag_layer() = default;
         add_tag_layer(const add_tag_layer&) = default;
-        add_tag_layer(add_tag_layer&&) = default;
-        add_tag_layer& operator=(add_tag_layer&&) = default;
         add_tag_layer& operator=(const add_tag_layer&) = default;
+        add_tag_layer(add_tag_layer&& item) : add_tag_layer() { swap(item); }
+        add_tag_layer& operator=(add_tag_layer&& item) { swap(item); return *this; }
 
         template <typename T, typename E>
         add_tag_layer(
@@ -815,6 +835,13 @@ namespace dlib
     private:
 
+        void swap(add_tag_layer& item)
+        {
+            std::swap(input_layer, item.input_layer);
+            std::swap(cached_output, item.cached_output);
+            std::swap(grad_final_ignored, item.grad_final_ignored);
+        }
+
         subnet_type input_layer;
         resizable_tensor cached_output;
         resizable_tensor grad_final_ignored;
@@ -876,11 +903,11 @@ namespace dlib
                 "The loss layer and input layer must agree on the sample_expansion_factor."
             );
 
-        add_loss_layer() = default;
+        add_loss_layer() {};
         add_loss_layer(const add_loss_layer&) = default;
-        add_loss_layer(add_loss_layer&&) = default;
-        add_loss_layer& operator=(add_loss_layer&&) = default;
         add_loss_layer& operator=(const add_loss_layer&) = default;
+        add_loss_layer(add_loss_layer&& item) : add_loss_layer() { swap(item); }
+        add_loss_layer& operator=(add_loss_layer&& item) { swap(item); return *this; }
 
         template <typename T, typename U>
         add_loss_layer(
@@ -1079,6 +1106,12 @@ namespace dlib
     private:
 
+        void swap(add_loss_layer& item)
+        {
+            std::swap(loss, item.loss);
+            std::swap(subnetwork, item.subnetwork);
+        }
+
         loss_details_type loss;
         subnet_type subnetwork;
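Why "just to be safe": a defaulted move constructor moves member-wise, which empties containers and tensors but merely copies scalar members, so the source object can end up internally inconsistent, e.g. a network whose this_layer_setup_called flag is still true while its tensors have been moved out. The swap-based moves above leave the source exactly default-constructed instead. A small self-contained illustration of the hazard (defaulted_moves is a hypothetical struct, not dlib code):

    #include <cassert>
    #include <utility>
    #include <vector>

    // Hypothetical struct illustrating the hazard of '= default' moves:
    // the vector is (typically) emptied, but the bool is merely copied.
    struct defaulted_moves
    {
        std::vector<float> cache;
        bool setup_called = false;
    };

    int main()
    {
        defaulted_moves a;
        a.cache.push_back(1.0f);
        a.setup_called = true;

        defaulted_moves b(std::move(a));
        assert(b.setup_called && b.cache.size() == 1);
        // a.setup_called is still true, but a.cache is in a moved-from
        // (valid but unspecified, typically empty) state: the object now
        // claims to be set up while its data is gone.
        return 0;
    }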