OpenDAS / fairscale / Commits

Commit 4f7f0853 (unverified), authored Sep 05, 2021 by Tim Brooks, committed by GitHub on Sep 05, 2021. Parent: 3ecf76f4

Add method for disabling gradient checkpointing (#772)

See https://github.com/facebookresearch/fairscale/issues/771

Showing 1 changed file with 13 additions and 1 deletion:

fairscale/nn/checkpoint/checkpoint_activations.py (+13, -1)
@@ -25,11 +25,23 @@ class ThreadLocal(threading.local):
     def __init__(self) -> None:
         self.is_checkpointing = False
         self.is_recomputing = False
+        self.is_checkpointing_disabled = False
 
 
 thread_local = ThreadLocal()
 
 
+@contextmanager
+def disable_checkpointing() -> Generator[None, None, None]:
+    """Makes :func:`is_checkpointing_disabled` return :data:`True` within a context."""
+    orig = thread_local.is_checkpointing_disabled
+    thread_local.is_checkpointing_disabled = True
+    try:
+        yield
+    finally:
+        thread_local.is_checkpointing_disabled = orig
+
+
 @contextmanager
 def enable_checkpointing() -> Generator[None, None, None]:
     """Makes :func:`is_checkpointing` return :data:`True` within a context."""
@@ -164,7 +176,7 @@ def _checkpointed_forward(
     # which would be an issue during eval since there wouldn't be a corresponding backward pass
     # to decrement the fwd counter.
     # See https://github.com/facebookresearch/fairscale/pull/709.
-    if not torch.is_grad_enabled():
+    if not torch.is_grad_enabled() or thread_local.is_checkpointing_disabled:
         return original_forward(module, *args, **kwargs)
     # Autograd Functions in PyTorch work best with positional args, since
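For illustration only, a sketch of the thread-local flag semantics this guard relies on. Note that thread_local is an internal module-level name, so importing it directly is an assumption for demonstration, not public API: the flag is set for the duration of the context, restored by the finally block even on exceptions, and is exactly what the updated condition in _checkpointed_forward observes.

# Illustrative sketch; thread_local is an internal name in checkpoint_activations.py.
import torch
from fairscale.nn.checkpoint.checkpoint_activations import (
    disable_checkpointing,
    thread_local,
)

assert thread_local.is_checkpointing_disabled is False

with disable_checkpointing():
    # Mirrors the updated guard in _checkpointed_forward: the checkpointed
    # path is skipped when grad is off OR when this flag is set.
    skip = not torch.is_grad_enabled() or thread_local.is_checkpointing_disabled
    assert skip is True

    # Nesting is safe: the inner context saves and restores whatever it saw on entry.
    with disable_checkpointing():
        assert thread_local.is_checkpointing_disabled is True

# The try/finally in disable_checkpointing restores the original value,
# so the flag is back to False here.
assert thread_local.is_checkpointing_disabled is False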