Unverified commit 54e46e21 authored by Deepak Narayanan, committed by GitHub

Use non-deprecated PyTorch methods to silence warnings (#541)



Getting warnings of the following form when using top-of-tree (ToT) Transformer Engine (TE):

```
/usr/local/lib/python3.10/dist-packages/transformer_engine/pytorch/attention.py:852: UserWarning: TypedStorage is deprecated. It will be removed in the future and UntypedStorage will be the only storage class. This should only matter to you if you are using storages directly.  To access UntypedStorage directly, use tensor.untyped_storage() instead of tensor.storage()
   data_ptr = grad_outputs[0].storage().data_ptr()
```
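
The fix is to replace the deprecated `Tensor.storage()` accessor with `Tensor.untyped_storage()`; both expose a `data_ptr()` that identifies the underlying allocation, so the aliasing check below behaves the same. A minimal sketch of the two accessors (illustrative only, not part of the patch):

```python
import torch

x = torch.randn(4, 8)

# Deprecated: emits the TypedStorage UserWarning quoted above on recent PyTorch.
# data_ptr = x.storage().data_ptr()

# Non-deprecated: untyped_storage() returns the underlying UntypedStorage,
# whose data_ptr() identifies the shared allocation.
data_ptr = x.untyped_storage().data_ptr()

# Views produced by torch.split share that allocation, so their
# untyped_storage().data_ptr() values all match.
a, b = torch.split(x, [2, 2], dim=0)
assert a.untyped_storage().data_ptr() == b.untyped_storage().data_ptr() == data_ptr
```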
Signed-off-by: Deepak Narayanan <2724038+deepakn94@users.noreply.github.com>
parent cbcac3f5
```diff
@@ -850,7 +850,7 @@ class _SplitAlongDim(torch.autograd.Function):
         noop_ok = True
         strides = grad_outputs[0].stride()
-        data_ptr = grad_outputs[0].storage().data_ptr()
+        data_ptr = grad_outputs[0].untyped_storage().data_ptr()
         shape = list(grad_outputs[0].shape)
         for i, tensor in enumerate(grad_outputs):
             shape_i = shape
@@ -858,7 +858,7 @@ class _SplitAlongDim(torch.autograd.Function):
             offset_size = sum(split_sizes[:i]) * np.prod(shape[split_dim+1:])
             if (tensor.stride() != strides or
                 list(tensor.shape) != shape_i or
-                tensor.storage().data_ptr() != data_ptr or
+                tensor.untyped_storage().data_ptr() != data_ptr or
                 tensor.storage_offset() != offset_size):
                 noop_ok = False
                 break
```
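
For context, the two hunks above are part of a no-op check: if every incoming gradient is already a back-to-back view over one allocation, the backward pass can skip re-concatenating them. A self-contained sketch of that pattern (the helper name and standalone form are mine, not TE code):

```python
import numpy as np
import torch

def splits_form_contiguous_buffer(tensors, split_sizes, split_dim=0):
    """Hypothetical helper restating the patched check: True when the tensors
    are consecutive views over a single allocation, so re-joining them would
    be a no-op."""
    strides = tensors[0].stride()
    data_ptr = tensors[0].untyped_storage().data_ptr()  # non-deprecated accessor
    shape = list(tensors[0].shape)
    for i, tensor in enumerate(tensors):
        shape_i = list(shape)
        shape_i[split_dim] = split_sizes[i]
        # Expected element offset of split i inside the shared buffer.
        offset_size = sum(split_sizes[:i]) * int(np.prod(shape[split_dim + 1:]))
        if (tensor.stride() != strides or
                list(tensor.shape) != shape_i or
                tensor.untyped_storage().data_ptr() != data_ptr or
                tensor.storage_offset() != offset_size):
            return False
    return True

x = torch.randn(6, 4)
parts = torch.split(x, [2, 4], dim=0)
print(splits_form_contiguous_buffer(parts, [2, 4]))  # True
```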