Unverified Commit 79143c31 authored by Masaki Kozuki, committed by GitHub

Revert "officially deprecate and clarify the plan of pyprof removal (#1315)" (#1320)

This reverts commit 74e04667.
parent adbe075a
@@ -139,7 +139,7 @@ A Python-only build omits:
 `DistributedDataParallel`, `amp`, and `SyncBatchNorm` will still be usable, but they may be slower.
 Pyprof support has been moved to its own [dedicated repository](https://github.com/NVIDIA/PyProf).
-The codebase is deprecated in Apex and pyprof directory will be removed by the end of June, 2022.
+The codebase is deprecated in Apex and will be removed soon.
 ### Windows support
 Windows support is experimental, and Linux is recommended. `pip install -v --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" .` may work if you were able to build Pytorch from source
...
@@ -16,7 +16,6 @@ The NVTX markers (one or more) contain the following information
     numpy: name, shape and datatype
     list/tuple: a sequence of scalars or tensors or numpy arrays
 """

-import warnings
 import torch
 import torch.cuda.nvtx as nvtx
@@ -66,10 +65,6 @@ def modMarker(mod, fn_name, args):
     return str(d)

 def add_wrapper(mod, fn_name):
-    warnings.warn(
-        "apex.pyprof module will be removed by the end of June, 2022.",
-        FutureWarning,
-    )
     assert isfunc(mod, fn_name)

     # Get a pointer to the original function
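For context, the deleted warning sat at the top of `add_wrapper`, whose surviving context lines (`assert isfunc(mod, fn_name)`, `# Get a pointer to the original function`) show pyprof's monkey-patching pattern: replace `mod.fn_name` with a closure that brackets the original call in an NVTX range. A minimal sketch of that pattern, using only the `torch.cuda.nvtx` API imported above; `wrapper_func` and the omission of argument serialization are simplifications, not Apex's verbatim code:

```python
import torch.cuda.nvtx as nvtx

def add_wrapper(mod, fn_name):
    # Sketch of the pyprof wrapping pattern; the real implementation
    # also serializes call-stack and argument metadata into the marker.
    func = getattr(mod, fn_name)  # pointer to the original function

    def wrapper_func(*args, **kwargs):
        nvtx.range_push(fn_name)          # open an NVTX range named after the op
        result = func(*args, **kwargs)    # run the original implementation
        nvtx.range_pop()                  # close the range
        return result

    setattr(mod, fn_name, wrapper_func)   # monkey-patch the attribute in place
```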
@@ -143,7 +138,7 @@ def argMarker(mod, op, args, kwargs):
         # The arg could be torch.Size, which is a subclass of tuple
         # Therefore, explicitly convert to tuple
         a['value'] = tuple(arg)
     cadena['args'].append(a)

     def scalar(arg, name=""):
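The `cadena['args'].append(a)` context line above is where `argMarker` accumulates per-argument metadata before the dict is stringified into an NVTX marker. Based on the module docstring earlier in this diff (tensors and numpy arrays report name, shape and datatype; lists/tuples report their elements), a plausible payload looks like the following; every key other than `args`, `name`, and `value` is inferred for illustration, not confirmed by the visible hunks:

```python
# Hypothetical marker payload for an op such as torch.add(x, y)
# where x and y are 2x3 float32 tensors.  Field names beyond those
# visible in the diff are assumptions.
cadena = {
    "mod": "torch",
    "op": "add",
    "args": [
        {"name": "", "type": "tensor", "shape": (2, 3), "dtype": "float32"},
        {"name": "", "type": "tensor", "shape": (2, 3), "dtype": "float32"},
    ],
}
```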
@@ -209,10 +204,6 @@ def patchClass(cls):
             add_wrapper(cls, f)

 def init():
-    warnings.warn(
-        f"apex.pyprof module will be removed by the end of June, 2022.",
-        FutureWarning,
-    )
     string = "\n\nPyprof has been moved to its own dedicated repository and will " + \
              "soon be removed from Apex. Please visit\n" + \
              "https://github.com/NVIDIA/PyProf\n" + \
...
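Note that this revert removes the `FutureWarning` calls themselves, not the removal notice printed by `init()`. For anyone who had wired test suites or log filters to the deleted warning, only the standard `warnings` machinery was ever involved, for example:

```python
import warnings

# Escalate FutureWarnings to errors, e.g. to catch deprecations in CI.
warnings.simplefilter("error", FutureWarning)

# Or silence just the pyprof removal notice while keeping other warnings.
warnings.filterwarnings(
    "ignore",
    message=r"apex\.pyprof module will be removed",
    category=FutureWarning,
)
```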